1 /* Alias analysis for trees.
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41 #include "ipa-modref-tree.h"
42 #include "ipa-modref.h"
43 #include "attr-fnspec.h"
44 #include "errors.h"
45 #include "dbgcnt.h"
46 #include "gimple-pretty-print.h"
47 #include "print-tree.h"
48 #include "tree-ssa-alias-compare.h"
49 #include "builtins.h"
50
51 /* Broad overview of how alias analysis on gimple works:
52
53 Statements clobbering or using memory are linked through the
54 virtual operand factored use-def chain. The virtual operand
55 is unique per function; its symbol is accessible via gimple_vop (cfun).
56 Virtual operands are used for efficiently walking memory statements
57 in the gimple IL and are useful for things like value-numbering as
58 a generation count for memory references.
59
60 SSA_NAME pointers may have associated points-to information
61 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
62 points-to information is (re-)computed by the TODO_rebuild_alias
63 pass manager todo. Points-to information is also used for more
64 precise tracking of call-clobbered and call-used variables and
65 related disambiguations.
66
67 This file contains functions for disambiguating memory references,
68 the so called alias-oracle and tools for walking of the gimple IL.
69
70 The main alias-oracle entry-points are
71
72 bool stmt_may_clobber_ref_p (gimple *, tree)
73
74 This function queries if a statement may invalidate (parts of)
75 the memory designated by the reference tree argument.
76
77 bool ref_maybe_used_by_stmt_p (gimple *, tree)
78
79 This function queries if a statement may need (parts of) the
80 memory designated by the reference tree argument.
81
82 There are variants of these functions that only handle the call
83 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
84 Note that these do not disambiguate against a possible call lhs.
85
86 bool refs_may_alias_p (tree, tree)
87
88 This function tries to disambiguate two reference trees.
89
90 bool ptr_deref_may_alias_global_p (tree)
91
92 This function queries if dereferencing a pointer variable may
93 alias global memory.
94
95 More low-level disambiguators are available and documented in
96 this file. Low-level disambiguators dealing with points-to
97 information are in tree-ssa-structalias.cc. */
98
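/* Illustrative sketch (not itself part of the oracle): a client such as
   dead store elimination would typically query the oracle as

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ...  STMT neither writes nor reads the memory designated by REF ...

   using the entry points documented above.  */
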
99 static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
100 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
101
102 /* Query statistics for the different low-level disambiguators.
103 A high-level query may trigger multiple of them. */
104
105 static struct {
106 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
107 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
108 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
109 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
110 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
111 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
112 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
113 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
114 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
115 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
116 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
117 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
118 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
119 unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
120 unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
121 unsigned HOST_WIDE_INT modref_use_may_alias;
122 unsigned HOST_WIDE_INT modref_use_no_alias;
123 unsigned HOST_WIDE_INT modref_clobber_may_alias;
124 unsigned HOST_WIDE_INT modref_clobber_no_alias;
125 unsigned HOST_WIDE_INT modref_kill_no;
126 unsigned HOST_WIDE_INT modref_kill_yes;
127 unsigned HOST_WIDE_INT modref_tests;
128 unsigned HOST_WIDE_INT modref_baseptr_tests;
129 } alias_stats;
130
131 void
132 dump_alias_stats (FILE *s)
133 {
134 fprintf (s, "\nAlias oracle query stats:\n");
135 fprintf (s, " refs_may_alias_p: "
136 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
137 HOST_WIDE_INT_PRINT_DEC" queries\n",
138 alias_stats.refs_may_alias_p_no_alias,
139 alias_stats.refs_may_alias_p_no_alias
140 + alias_stats.refs_may_alias_p_may_alias);
141 fprintf (s, " ref_maybe_used_by_call_p: "
142 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
143 HOST_WIDE_INT_PRINT_DEC" queries\n",
144 alias_stats.ref_maybe_used_by_call_p_no_alias,
145 alias_stats.ref_maybe_used_by_call_p_no_alias
146 + alias_stats.ref_maybe_used_by_call_p_may_alias);
147 fprintf (s, " call_may_clobber_ref_p: "
148 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
149 HOST_WIDE_INT_PRINT_DEC" queries\n",
150 alias_stats.call_may_clobber_ref_p_no_alias,
151 alias_stats.call_may_clobber_ref_p_no_alias
152 + alias_stats.call_may_clobber_ref_p_may_alias);
153 fprintf (s, " stmt_kills_ref_p: "
154 HOST_WIDE_INT_PRINT_DEC" kills, "
155 HOST_WIDE_INT_PRINT_DEC" queries\n",
156 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
157 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
158 + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
159 fprintf (s, " nonoverlapping_component_refs_p: "
160 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
161 HOST_WIDE_INT_PRINT_DEC" queries\n",
162 alias_stats.nonoverlapping_component_refs_p_no_alias,
163 alias_stats.nonoverlapping_component_refs_p_no_alias
164 + alias_stats.nonoverlapping_component_refs_p_may_alias);
165 fprintf (s, " nonoverlapping_refs_since_match_p: "
166 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
167 HOST_WIDE_INT_PRINT_DEC" must overlaps, "
168 HOST_WIDE_INT_PRINT_DEC" queries\n",
169 alias_stats.nonoverlapping_refs_since_match_p_no_alias,
170 alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
171 alias_stats.nonoverlapping_refs_since_match_p_no_alias
172 + alias_stats.nonoverlapping_refs_since_match_p_may_alias
173 + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
174 fprintf (s, " aliasing_component_refs_p: "
175 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
176 HOST_WIDE_INT_PRINT_DEC" queries\n",
177 alias_stats.aliasing_component_refs_p_no_alias,
178 alias_stats.aliasing_component_refs_p_no_alias
179 + alias_stats.aliasing_component_refs_p_may_alias);
180 dump_alias_stats_in_alias_c (s);
181 fprintf (s, "\nModref stats:\n");
182 fprintf (s, " modref kill: "
183 HOST_WIDE_INT_PRINT_DEC" kills, "
184 HOST_WIDE_INT_PRINT_DEC" queries\n",
185 alias_stats.modref_kill_yes,
186 alias_stats.modref_kill_yes
187 + alias_stats.modref_kill_no);
188 fprintf (s, " modref use: "
189 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
190 HOST_WIDE_INT_PRINT_DEC" queries\n",
191 alias_stats.modref_use_no_alias,
192 alias_stats.modref_use_no_alias
193 + alias_stats.modref_use_may_alias);
194 fprintf (s, " modref clobber: "
195 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
196 HOST_WIDE_INT_PRINT_DEC" queries\n"
197 " " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
198 " " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
199 alias_stats.modref_clobber_no_alias,
200 alias_stats.modref_clobber_no_alias
201 + alias_stats.modref_clobber_may_alias,
202 alias_stats.modref_tests,
203 ((double)alias_stats.modref_tests)
204 / (alias_stats.modref_clobber_no_alias
205 + alias_stats.modref_clobber_may_alias),
206 alias_stats.modref_baseptr_tests,
207 ((double)alias_stats.modref_baseptr_tests)
208 / (alias_stats.modref_clobber_no_alias
209 + alias_stats.modref_clobber_may_alias));
210 }
211
212
213 /* Return true, if dereferencing PTR may alias with a global variable.
214 When ESCAPED_LOCAL_P is true escaped local memory is also considered
215 global. */
216
217 bool
218 ptr_deref_may_alias_global_p (tree ptr, bool escaped_local_p)
219 {
220 struct ptr_info_def *pi;
221
222 /* If we end up with a pointer constant here, it may point
223 to global memory. */
224 if (TREE_CODE (ptr) != SSA_NAME)
225 return true;
226
227 pi = SSA_NAME_PTR_INFO (ptr);
228
229 /* If we do not have points-to information for this variable,
230 we have to punt. */
231 if (!pi)
232 return true;
233
234 /* ??? This does not use TBAA to prune globals ptr may not access. */
235 return pt_solution_includes_global (&pi->pt, escaped_local_p);
236 }
237
238 /* Return true if dereferencing PTR may alias DECL.
239 The caller is responsible for applying TBAA to see if PTR
240 may access DECL at all. */
241
242 static bool
243 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
244 {
245 struct ptr_info_def *pi;
246
247 /* Conversions are irrelevant for points-to information and
248 data-dependence analysis can feed us those. */
249 STRIP_NOPS (ptr);
250
251 /* Anything we do not explicitly handle aliases. */
252 if ((TREE_CODE (ptr) != SSA_NAME
253 && TREE_CODE (ptr) != ADDR_EXPR
254 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
255 || !POINTER_TYPE_P (TREE_TYPE (ptr))
256 || (!VAR_P (decl)
257 && TREE_CODE (decl) != PARM_DECL
258 && TREE_CODE (decl) != RESULT_DECL))
259 return true;
260
261 /* Disregard pointer offsetting. */
262 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
263 {
264 do
265 {
266 ptr = TREE_OPERAND (ptr, 0);
267 }
268 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
269 return ptr_deref_may_alias_decl_p (ptr, decl);
270 }
271
272 /* ADDR_EXPR pointers either just offset another pointer or directly
273 specify the pointed-to set. */
274 if (TREE_CODE (ptr) == ADDR_EXPR)
275 {
276 tree base = get_base_address (TREE_OPERAND (ptr, 0));
277 if (base
278 && (TREE_CODE (base) == MEM_REF
279 || TREE_CODE (base) == TARGET_MEM_REF))
280 ptr = TREE_OPERAND (base, 0);
281 else if (base
282 && DECL_P (base))
283 return compare_base_decls (base, decl) != 0;
284 else if (base
285 && CONSTANT_CLASS_P (base))
286 return false;
287 else
288 return true;
289 }
290
291 /* Non-aliased variables cannot be pointed to. */
292 if (!may_be_aliased (decl))
293 return false;
294
295 /* If we do not have useful points-to information for this pointer
296 we cannot disambiguate anything else. */
297 pi = SSA_NAME_PTR_INFO (ptr);
298 if (!pi)
299 return true;
300
301 return pt_solution_includes (&pi->pt, decl);
302 }
303
304 /* Return true if dereferenced PTR1 and PTR2 may alias.
305 The caller is responsible for applying TBAA to see if accesses
306 through PTR1 and PTR2 may conflict at all. */
307
308 bool
309 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
310 {
311 struct ptr_info_def *pi1, *pi2;
312
313 /* Conversions are irrelevant for points-to information and
314 data-dependence analysis can feed us those. */
315 STRIP_NOPS (ptr1);
316 STRIP_NOPS (ptr2);
317
318 /* Disregard pointer offsetting. */
319 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
320 {
321 do
322 {
323 ptr1 = TREE_OPERAND (ptr1, 0);
324 }
325 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
326 return ptr_derefs_may_alias_p (ptr1, ptr2);
327 }
328 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
329 {
330 do
331 {
332 ptr2 = TREE_OPERAND (ptr2, 0);
333 }
334 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
335 return ptr_derefs_may_alias_p (ptr1, ptr2);
336 }
337
338 /* ADDR_EXPR pointers either just offset another pointer or directly
339 specify the pointed-to set. */
340 if (TREE_CODE (ptr1) == ADDR_EXPR)
341 {
342 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
343 if (base
344 && (TREE_CODE (base) == MEM_REF
345 || TREE_CODE (base) == TARGET_MEM_REF))
346 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
347 else if (base
348 && DECL_P (base))
349 return ptr_deref_may_alias_decl_p (ptr2, base);
350 else
351 return true;
352 }
353 if (TREE_CODE (ptr2) == ADDR_EXPR)
354 {
355 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
356 if (base
357 && (TREE_CODE (base) == MEM_REF
358 || TREE_CODE (base) == TARGET_MEM_REF))
359 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
360 else if (base
361 && DECL_P (base))
362 return ptr_deref_may_alias_decl_p (ptr1, base);
363 else
364 return true;
365 }
366
367 /* From here we require SSA name pointers. Anything else aliases. */
368 if (TREE_CODE (ptr1) != SSA_NAME
369 || TREE_CODE (ptr2) != SSA_NAME
370 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
371 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
372 return true;
373
374 /* We may end up with two empty points-to solutions for two identical
375 pointers. In this case we still want to say both pointers alias, so
376 shortcut that here. */
377 if (ptr1 == ptr2)
378 return true;
379
380 /* If we do not have useful points-to information for either pointer
381 we cannot disambiguate anything else. */
382 pi1 = SSA_NAME_PTR_INFO (ptr1);
383 pi2 = SSA_NAME_PTR_INFO (ptr2);
384 if (!pi1 || !pi2)
385 return true;
386
387 /* ??? This does not use TBAA to prune decls from the intersection
388 that not both pointers may access. */
389 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
390 }
391
392 /* Return true if dereferencing PTR may alias *REF.
393 The caller is responsible for applying TBAA to see if PTR
394 may access *REF at all. */
395
396 static bool
397 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
398 {
399 tree base = ao_ref_base (ref);
400
401 if (TREE_CODE (base) == MEM_REF
402 || TREE_CODE (base) == TARGET_MEM_REF)
403 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
404 else if (DECL_P (base))
405 return ptr_deref_may_alias_decl_p (ptr, base);
406
407 return true;
408 }
409
410 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
411
412 bool
413 ptrs_compare_unequal (tree ptr1, tree ptr2)
414 {
415 /* First resolve the pointers down to an SSA name pointer base or
416 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
417 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
418 or STRING_CSTs which need points-to adjustments to track them
419 in the points-to sets. */
420 tree obj1 = NULL_TREE;
421 tree obj2 = NULL_TREE;
422 if (TREE_CODE (ptr1) == ADDR_EXPR)
423 {
424 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
425 if (! tem)
426 return false;
427 if (VAR_P (tem)
428 || TREE_CODE (tem) == PARM_DECL
429 || TREE_CODE (tem) == RESULT_DECL)
430 obj1 = tem;
431 else if (TREE_CODE (tem) == MEM_REF)
432 ptr1 = TREE_OPERAND (tem, 0);
433 }
434 if (TREE_CODE (ptr2) == ADDR_EXPR)
435 {
436 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
437 if (! tem)
438 return false;
439 if (VAR_P (tem)
440 || TREE_CODE (tem) == PARM_DECL
441 || TREE_CODE (tem) == RESULT_DECL)
442 obj2 = tem;
443 else if (TREE_CODE (tem) == MEM_REF)
444 ptr2 = TREE_OPERAND (tem, 0);
445 }
446
447 /* Canonicalize ptr vs. object. */
448 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
449 {
450 std::swap (ptr1, ptr2);
451 std::swap (obj1, obj2);
452 }
453
454 if (obj1 && obj2)
455 /* Other code handles this correctly, no need to duplicate it here. */;
456 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
457 {
458 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
459 /* We may not use restrict to optimize pointer comparisons.
460 See PR71062. So we have to assume that the restrict-pointed-to
461 object may in fact be obj1. */
462 if (!pi
463 || pi->pt.vars_contains_restrict
464 || pi->pt.vars_contains_interposable)
465 return false;
466 if (VAR_P (obj1)
467 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
468 {
469 varpool_node *node = varpool_node::get (obj1);
470 /* If obj1 may bind to NULL give up (see below). */
471 if (! node
472 || ! node->nonzero_address ()
473 || ! decl_binds_to_current_def_p (obj1))
474 return false;
475 }
476 return !pt_solution_includes (&pi->pt, obj1);
477 }
478
479 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
480 but those require pt.null to be conservatively correct. */
481
482 return false;
483 }
484
485 /* Returns whether reference REF to BASE may refer to global memory.
486 When ESCAPED_LOCAL_P is true escaped local memory is also considered
487 global. */
488
489 static bool
490 ref_may_alias_global_p_1 (tree base, bool escaped_local_p)
491 {
492 if (DECL_P (base))
493 return (is_global_var (base)
494 || (escaped_local_p
495 && pt_solution_includes (&cfun->gimple_df->escaped, base)));
496 else if (TREE_CODE (base) == MEM_REF
497 || TREE_CODE (base) == TARGET_MEM_REF)
498 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0),
499 escaped_local_p);
500 return true;
501 }
502
503 bool
504 ref_may_alias_global_p (ao_ref *ref, bool escaped_local_p)
505 {
506 tree base = ao_ref_base (ref);
507 return ref_may_alias_global_p_1 (base, escaped_local_p);
508 }
509
510 bool
511 ref_may_alias_global_p (tree ref, bool escaped_local_p)
512 {
513 tree base = get_base_address (ref);
514 return ref_may_alias_global_p_1 (base, escaped_local_p);
515 }
516
517 /* Return true if STMT may clobber global memory.
518 When ESCAPED_LOCAL_P is true escaped local memory is also considered
519 global. */
520
521 bool
522 stmt_may_clobber_global_p (gimple *stmt, bool escaped_local_p)
523 {
524 tree lhs;
525
526 if (!gimple_vdef (stmt))
527 return false;
528
529 /* ??? We can ask the oracle whether an artificial pointer
530 dereference with a pointer with points-to information covering
531 all global memory (what about non-address taken memory?) may be
532 clobbered by this call. As there is at the moment no convenient
533 way of doing that without generating garbage, do some manual
534 checking instead.
535 ??? We could make a NULL ao_ref argument to the various
536 predicates special, meaning any global memory. */
537
538 switch (gimple_code (stmt))
539 {
540 case GIMPLE_ASSIGN:
541 lhs = gimple_assign_lhs (stmt);
542 return (TREE_CODE (lhs) != SSA_NAME
543 && ref_may_alias_global_p (lhs, escaped_local_p));
544 case GIMPLE_CALL:
545 return true;
546 default:
547 return true;
548 }
549 }
550
551
552 /* Dump alias information on FILE. */
553
554 void
555 dump_alias_info (FILE *file)
556 {
557 unsigned i;
558 tree ptr;
559 const char *funcname
560 = lang_hooks.decl_printable_name (current_function_decl, 2);
561 tree var;
562
563 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
564
565 fprintf (file, "Aliased symbols\n\n");
566
567 FOR_EACH_LOCAL_DECL (cfun, i, var)
568 {
569 if (may_be_aliased (var))
570 dump_variable (file, var);
571 }
572
573 fprintf (file, "\nCall clobber information\n");
574
575 fprintf (file, "\nESCAPED");
576 dump_points_to_solution (file, &cfun->gimple_df->escaped);
577
578 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
579
580 FOR_EACH_SSA_NAME (i, ptr, cfun)
581 {
582 struct ptr_info_def *pi;
583
584 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
585 || SSA_NAME_IN_FREE_LIST (ptr))
586 continue;
587
588 pi = SSA_NAME_PTR_INFO (ptr);
589 if (pi)
590 dump_points_to_info_for (file, ptr);
591 }
592
593 fprintf (file, "\n");
594 }
595
596
597 /* Dump alias information on stderr. */
598
599 DEBUG_FUNCTION void
600 debug_alias_info (void)
601 {
602 dump_alias_info (stderr);
603 }
604
605
606 /* Dump the points-to set *PT into FILE. */
607
608 void
609 dump_points_to_solution (FILE *file, struct pt_solution *pt)
610 {
611 if (pt->anything)
612 fprintf (file, ", points-to anything");
613
614 if (pt->nonlocal)
615 fprintf (file, ", points-to non-local");
616
617 if (pt->escaped)
618 fprintf (file, ", points-to escaped");
619
620 if (pt->ipa_escaped)
621 fprintf (file, ", points-to unit escaped");
622
623 if (pt->null)
624 fprintf (file, ", points-to NULL");
625
626 if (pt->vars)
627 {
628 fprintf (file, ", points-to vars: ");
629 dump_decl_set (file, pt->vars);
630 if (pt->vars_contains_nonlocal
631 || pt->vars_contains_escaped
632 || pt->vars_contains_escaped_heap
633 || pt->vars_contains_restrict)
634 {
635 const char *comma = "";
636 fprintf (file, " (");
637 if (pt->vars_contains_nonlocal)
638 {
639 fprintf (file, "nonlocal");
640 comma = ", ";
641 }
642 if (pt->vars_contains_escaped)
643 {
644 fprintf (file, "%sescaped", comma);
645 comma = ", ";
646 }
647 if (pt->vars_contains_escaped_heap)
648 {
649 fprintf (file, "%sescaped heap", comma);
650 comma = ", ";
651 }
652 if (pt->vars_contains_restrict)
653 {
654 fprintf (file, "%srestrict", comma);
655 comma = ", ";
656 }
657 if (pt->vars_contains_interposable)
658 fprintf (file, "%sinterposable", comma);
659 fprintf (file, ")");
660 }
661 }
662 }
663
664
665 /* Unified dump function for pt_solution. */
666
667 DEBUG_FUNCTION void
668 debug (pt_solution &ref)
669 {
670 dump_points_to_solution (stderr, &ref);
671 }
672
673 DEBUG_FUNCTION void
674 debug (pt_solution *ptr)
675 {
676 if (ptr)
677 debug (*ptr);
678 else
679 fprintf (stderr, "<nil>\n");
680 }
681
682
683 /* Dump points-to information for SSA_NAME PTR into FILE. */
684
685 void
686 dump_points_to_info_for (FILE *file, tree ptr)
687 {
688 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
689
690 print_generic_expr (file, ptr, dump_flags);
691
692 if (pi)
693 dump_points_to_solution (file, &pi->pt);
694 else
695 fprintf (file, ", points-to anything");
696
697 fprintf (file, "\n");
698 }
699
700
701 /* Dump points-to information for VAR into stderr. */
702
703 DEBUG_FUNCTION void
704 debug_points_to_info_for (tree var)
705 {
706 dump_points_to_info_for (stderr, var);
707 }
708
709
710 /* Initializes the alias-oracle reference representation *R from REF. */
711
712 void
713 ao_ref_init (ao_ref *r, tree ref)
714 {
715 r->ref = ref;
716 r->base = NULL_TREE;
717 r->offset = 0;
718 r->size = -1;
719 r->max_size = -1;
720 r->ref_alias_set = -1;
721 r->base_alias_set = -1;
722 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
723 }
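
/* A minimal usage sketch (illustrative only):

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);

   After ao_ref_base the base, offset, size and max_size fields of R are
   filled in and cached, so repeated oracle queries against the same
   reference do not recompute them.  */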
724
725 /* Returns the base object of the memory reference *REF. */
726
727 tree
728 ao_ref_base (ao_ref *ref)
729 {
730 bool reverse;
731
732 if (ref->base)
733 return ref->base;
734 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
735 &ref->max_size, &reverse);
736 return ref->base;
737 }
738
739 /* Returns the base object alias set of the memory reference *REF. */
740
741 alias_set_type
742 ao_ref_base_alias_set (ao_ref *ref)
743 {
744 tree base_ref;
745 if (ref->base_alias_set != -1)
746 return ref->base_alias_set;
747 if (!ref->ref)
748 return 0;
749 base_ref = ref->ref;
750 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
751 base_ref = TREE_OPERAND (base_ref, 0);
752 while (handled_component_p (base_ref))
753 base_ref = TREE_OPERAND (base_ref, 0);
754 ref->base_alias_set = get_alias_set (base_ref);
755 return ref->base_alias_set;
756 }
757
758 /* Returns the reference alias set of the memory reference *REF. */
759
760 alias_set_type
761 ao_ref_alias_set (ao_ref *ref)
762 {
763 if (ref->ref_alias_set != -1)
764 return ref->ref_alias_set;
765 if (!ref->ref)
766 return 0;
767 ref->ref_alias_set = get_alias_set (ref->ref);
768 return ref->ref_alias_set;
769 }
770
771 /* Returns a type satisfying
772 get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
773
774 tree
775 ao_ref_base_alias_ptr_type (ao_ref *ref)
776 {
777 tree base_ref;
778
779 if (!ref->ref)
780 return NULL_TREE;
781 base_ref = ref->ref;
782 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
783 base_ref = TREE_OPERAND (base_ref, 0);
784 while (handled_component_p (base_ref))
785 base_ref = TREE_OPERAND (base_ref, 0);
786 tree ret = reference_alias_ptr_type (base_ref);
787 return ret;
788 }
789
790 /* Returns a type satisfying
791 get_deref_alias_set (type) == ao_ref_alias_set (REF). */
792
793 tree
794 ao_ref_alias_ptr_type (ao_ref *ref)
795 {
796 if (!ref->ref)
797 return NULL_TREE;
798 tree ret = reference_alias_ptr_type (ref->ref);
799 return ret;
800 }
801
802 /* Return the alignment of the access *REF and store it in the *ALIGN
803 and *BITPOS pairs. Returns false if no alignment could be determined.
804 See get_object_alignment_2 for details. */
805
806 bool
807 ao_ref_alignment (ao_ref *ref, unsigned int *align,
808 unsigned HOST_WIDE_INT *bitpos)
809 {
810 if (ref->ref)
811 return get_object_alignment_1 (ref->ref, align, bitpos);
812
813 /* When we just have ref->base we cannot use get_object_alignment since
814 that will eventually use the type of the apparent access while for
815 example ao_ref_init_from_ptr_and_range is not careful to adjust that. */
816 *align = BITS_PER_UNIT;
817 HOST_WIDE_INT offset;
818 if (!ref->offset.is_constant (&offset)
819 || !get_object_alignment_2 (ref->base, align, bitpos, true))
820 return false;
821 *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
822 *bitpos = *bitpos & (*align - 1);
823 return true;
824 }
825
826 /* Init an alias-oracle reference representation from a gimple pointer
827 PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the assumption
828 that RANGE_KNOWN is set.
829 
830 The access is assumed to be only to or after the pointer target adjusted
831 by the offset, not before it (even in the case RANGE_KNOWN is false). */
832
833 void
834 ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
835 bool range_known,
836 poly_int64 offset,
837 poly_int64 size,
838 poly_int64 max_size)
839 {
840 poly_int64 t, extra_offset = 0;
841
842 ref->ref = NULL_TREE;
843 if (TREE_CODE (ptr) == SSA_NAME)
844 {
845 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
846 if (gimple_assign_single_p (stmt)
847 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
848 ptr = gimple_assign_rhs1 (stmt);
849 else if (is_gimple_assign (stmt)
850 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
851 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
852 {
853 ptr = gimple_assign_rhs1 (stmt);
854 extra_offset *= BITS_PER_UNIT;
855 }
856 }
857
858 if (TREE_CODE (ptr) == ADDR_EXPR)
859 {
860 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
861 if (ref->base)
862 ref->offset = BITS_PER_UNIT * t;
863 else
864 {
865 range_known = false;
866 ref->offset = 0;
867 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
868 }
869 }
870 else
871 {
872 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
873 ref->base = build2 (MEM_REF, char_type_node,
874 ptr, null_pointer_node);
875 ref->offset = 0;
876 }
877 ref->offset += extra_offset + offset;
878 if (range_known)
879 {
880 ref->max_size = max_size;
881 ref->size = size;
882 }
883 else
884 ref->max_size = ref->size = -1;
885 ref->ref_alias_set = 0;
886 ref->base_alias_set = 0;
887 ref->volatile_p = false;
888 }
889
890 /* Init an alias-oracle reference representation from a gimple pointer
891 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
892 size is assumed to be unknown. The access is assumed to be only
893 to or after the pointer target, not before it. */
894
895 void
896 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
897 {
898 poly_int64 size_hwi;
899 if (size
900 && poly_int_tree_p (size, &size_hwi)
901 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
902 {
903 size_hwi = size_hwi * BITS_PER_UNIT;
904 ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
905 }
906 else
907 ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
908 }
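
/* For example (an illustrative sketch), the destination of a call to
   memset (p, 0, n) can be described as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   which leaves the size unknown whenever N is not a compile-time
   constant.  */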
909
910 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
911 Return -1 if S1 < S2
912 Return 1 if S1 > S2
913 Return 0 if equal or incomparable. */
914
915 static int
916 compare_sizes (tree s1, tree s2)
917 {
918 if (!s1 || !s2)
919 return 0;
920
921 poly_uint64 size1;
922 poly_uint64 size2;
923
924 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
925 return 0;
926 if (known_lt (size1, size2))
927 return -1;
928 if (known_lt (size2, size1))
929 return 1;
930 return 0;
931 }
932
933 /* Compare TYPE1 and TYPE2 by its size.
934 Return -1 if size of TYPE1 < size of TYPE2
935 Return 1 if size of TYPE1 > size of TYPE2
936 Return 0 if types are of equal sizes or we can not compare them. */
937
938 static int
939 compare_type_sizes (tree type1, tree type2)
940 {
941 /* Be conservative for arrays and vectors. We want to support partial
942 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
943 while (TREE_CODE (type1) == ARRAY_TYPE
944 || TREE_CODE (type1) == VECTOR_TYPE)
945 type1 = TREE_TYPE (type1);
946 while (TREE_CODE (type2) == ARRAY_TYPE
947 || TREE_CODE (type2) == VECTOR_TYPE)
948 type2 = TREE_TYPE (type2);
949 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
950 }
951
952 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
953 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
954 decide. */
955
956 static inline int
957 same_type_for_tbaa (tree type1, tree type2)
958 {
959 type1 = TYPE_MAIN_VARIANT (type1);
960 type2 = TYPE_MAIN_VARIANT (type2);
961
962 /* Handle the most common case first. */
963 if (type1 == type2)
964 return 1;
965
966 /* If we would have to do structural comparison bail out. */
967 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
968 || TYPE_STRUCTURAL_EQUALITY_P (type2))
969 return -1;
970
971 /* Compare the canonical types. */
972 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
973 return 1;
974
975 /* ??? Array types are not properly unified in all cases as we have
976 spurious changes in the index types for example. Removing this
977 causes all sorts of problems with the Fortran frontend. */
978 if (TREE_CODE (type1) == ARRAY_TYPE
979 && TREE_CODE (type2) == ARRAY_TYPE)
980 return -1;
981
982 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
983 object of one of its constrained subtypes, e.g. when a function with an
984 unconstrained parameter passed by reference is called on an object and
985 inlined. But, even in the case of a fixed size, type and subtypes are
986 not equivalent enough as to share the same TYPE_CANONICAL, since this
987 would mean that conversions between them are useless, whereas they are
988 not (e.g. type and subtypes can have different modes). So, in the end,
989 they are only guaranteed to have the same alias set. */
990 alias_set_type set1 = get_alias_set (type1);
991 alias_set_type set2 = get_alias_set (type2);
992 if (set1 == set2)
993 return -1;
994
995 /* Pointers to void are considered compatible with all other pointers,
996 so for two pointers see what the alias set resolution thinks. */
997 if (POINTER_TYPE_P (type1)
998 && POINTER_TYPE_P (type2)
999 && alias_sets_conflict_p (set1, set2))
1000 return -1;
1001
1002 /* The types are known to be not equal. */
1003 return 0;
1004 }
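
/* For instance (illustration only): two distinct struct types that share
   the same TYPE_CANONICAL compare as equivalent (1), two array types
   always yield "don't know" (-1) as noted above, and int vs. float ends
   up returning 0 once their alias sets are found to differ.  */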
1005
1006 /* Return true if TYPE is a composite type (i.e. we may apply one of the
1007 handled components to it). */
1008
1009 static bool
1010 type_has_components_p (tree type)
1011 {
1012 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
1013 || TREE_CODE (type) == COMPLEX_TYPE;
1014 }
1015
1016 /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
1017 respectively are either pointing to same address or are completely
1018 disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
1019 just partly overlap.
1020
1021 Try to disambiguate using the access path starting from the match
1022 and return false if there is no conflict.
1023
1024 Helper for aliasing_component_refs_p. */
1025
1026 static bool
1027 aliasing_matching_component_refs_p (tree match1, tree ref1,
1028 poly_int64 offset1, poly_int64 max_size1,
1029 tree match2, tree ref2,
1030 poly_int64 offset2, poly_int64 max_size2,
1031 bool partial_overlap)
1032 {
1033 poly_int64 offadj, sztmp, msztmp;
1034 bool reverse;
1035
1036 if (!partial_overlap)
1037 {
1038 get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
1039 offset2 -= offadj;
1040 get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
1041 offset1 -= offadj;
1042 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1043 {
1044 ++alias_stats.aliasing_component_refs_p_no_alias;
1045 return false;
1046 }
1047 }
1048
1049 int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
1050 partial_overlap);
1051 if (cmp == 1
1052 || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
1053 {
1054 ++alias_stats.aliasing_component_refs_p_no_alias;
1055 return false;
1056 }
1057 ++alias_stats.aliasing_component_refs_p_may_alias;
1058 return true;
1059 }
1060
1061 /* Return true if REF is reference to zero sized trailing array. I.e.
1062 struct foo {int bar; int array[0];} *fooptr;
1063 fooptr->array. */
1064
1065 static bool
1066 component_ref_to_zero_sized_trailing_array_p (tree ref)
1067 {
1068 return (TREE_CODE (ref) == COMPONENT_REF
1069 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
1070 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
1071 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
1072 && array_at_struct_end_p (ref));
1073 }
1074
1075 /* Worker for aliasing_component_refs_p. Most parameters match parameters of
1076 aliasing_component_refs_p.
1077
1078 Walk access path REF2 and try to find type matching TYPE1
1079 (which is a start of possibly aliasing access path REF1).
1080 If match is found, try to disambiguate.
1081
1082 Return 0 for successful disambiguation.
1083 Return 1 if a match was found but disambiguation failed.
1084 Return -1 if there is no match.
1085 In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
1086 in access path REF2 and -1 if we are not sure. */
1087
1088 static int
1089 aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
1090 poly_int64 offset1, poly_int64 max_size1,
1091 tree end_struct_ref1,
1092 tree ref2, tree base2,
1093 poly_int64 offset2, poly_int64 max_size2,
1094 bool *maybe_match)
1095 {
1096 tree ref = ref2;
1097 int same_p = 0;
1098
1099 while (true)
1100 {
1101 /* We walk from inner type to the outer types. If type we see is
1102 already too large to be part of type1, terminate the search. */
1103 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
1104
1105 if (cmp < 0
1106 && (!end_struct_ref1
1107 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
1108 TREE_TYPE (ref)) < 0))
1109 break;
1110 /* If types may be of same size, see if we can decide about their
1111 equality. */
1112 if (cmp == 0)
1113 {
1114 same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
1115 if (same_p == 1)
1116 break;
1117 /* In case we can't decide whether the types are the same, try to
1118 continue looking for an exact match.
1119 Remember however that we possibly saw a match, so that we can
1120 bypass the access path continuation tests done later. */
1121 if (same_p == -1)
1122 *maybe_match = true;
1123 }
1124 if (!handled_component_p (ref))
1125 break;
1126 ref = TREE_OPERAND (ref, 0);
1127 }
1128 if (same_p == 1)
1129 {
1130 bool partial_overlap = false;
1131
1132 /* We assume that arrays can overlap by a multiple of their element
1133 size as tested in gcc.dg/torture/alias-2.c.
1134 This partial overlap happens only when both arrays are bases of
1135 the access and not contained within another component ref.
1136 To be safe we also assume partial overlap for VLAs. */
1137 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1138 && (!TYPE_SIZE (TREE_TYPE (base1))
1139 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1140 || ref == base2))
1141 {
1142 /* Setting maybe_match to true triggers
1143 nonoverlapping_component_refs_p test later that still may do
1144 useful disambiguation. */
1145 *maybe_match = true;
1146 partial_overlap = true;
1147 }
1148 return aliasing_matching_component_refs_p (base1, ref1,
1149 offset1, max_size1,
1150 ref, ref2,
1151 offset2, max_size2,
1152 partial_overlap);
1153 }
1154 return -1;
1155 }
1156
1157 /* Consider access path1 base1....ref1 and access path2 base2...ref2.
1158 Return true if they can be composed to single access path
1159 base1...ref1...base2...ref2.
1160
1161 REF_TYPE1 is the type of REF1. END_STRUCT_PAST_END1 is true if there is
1162 a trailing array access after REF1 in the non-TBAA part of the access.
1163 REF1_ALIAS_SET is the alias set of REF1.
1164
1165 BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1166 a trailing array access in the TBAA part of access path2.
1167 BASE2_ALIAS_SET is the alias set of base2. */
1168
1169 bool
1170 access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1171 alias_set_type ref1_alias_set,
1172 tree base_type2, tree end_struct_ref2,
1173 alias_set_type base2_alias_set)
1174 {
1175 /* Access path can not continue past types with no components. */
1176 if (!type_has_components_p (ref_type1))
1177 return false;
1178
1179 /* If the first access path ends with a type too small to hold the base
1180 of the second access path, the paths typically cannot continue.
1181 
1182 Punt if end_struct_past_end1 is true. We want to support arbitrary
1183 type punning past the first COMPONENT_REF to a union because redundant
1184 store elimination depends on this, see PR92152. For this reason we
1185 cannot check the size of the reference because types may partially overlap. */
1186 if (!end_struct_past_end1)
1187 {
1188 if (compare_type_sizes (ref_type1, base_type2) < 0)
1189 return false;
1190 /* If path2 contains a trailing array access we can strengthen the check
1191 to verify that also the size of the element of the trailing array fits.
1192 In fact we could check for offset + type_size, but we do not track
1193 offsets and this is quite a corner case. */
1194 if (end_struct_ref2
1195 && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1196 return false;
1197 }
1198 return (base2_alias_set == ref1_alias_set
1199 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1200 }
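
/* As an illustration (not an exhaustive rule): with
     struct A { int i; int j; } *q;
     struct B { struct A a; int k; } *p;
   a first path ending in the int p->a.j cannot be continued by a second
   path at all, since int has no components; a first path ending in a
   struct A may be continued by the path of q->i when the alias set of
   struct A is a subset of (or equal to) the first path's alias set.  */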
1201
1202 /* Determine if the two component references REF1 and REF2 which are
1203 based on access types TYPE1 and TYPE2 and of which at least one is based
1204 on an indirect reference may alias.
1205 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1206 are the respective alias sets. */
1207
1208 static bool
1209 aliasing_component_refs_p (tree ref1,
1210 alias_set_type ref1_alias_set,
1211 alias_set_type base1_alias_set,
1212 poly_int64 offset1, poly_int64 max_size1,
1213 tree ref2,
1214 alias_set_type ref2_alias_set,
1215 alias_set_type base2_alias_set,
1216 poly_int64 offset2, poly_int64 max_size2)
1217 {
1218 /* If one reference is a component reference through pointers, try to find a
1219 common base and apply offset based disambiguation. This handles
1220 for example
1221 struct A { int i; int j; } *q;
1222 struct B { struct A a; int k; } *p;
1223 disambiguating q->i and p->a.j. */
1224 tree base1, base2;
1225 tree type1, type2;
1226 bool maybe_match = false;
1227 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1228 bool end_struct_past_end1 = false;
1229 bool end_struct_past_end2 = false;
1230
1231 /* Choose bases and base types to search for.
1232 The access path is as follows:
1233 base....end_of_tbaa_ref...actual_ref
1234 At one place in the access path there may be a reference to a zero sized
1235 or trailing array.
1236 
1237 We generally discard the segment after end_of_tbaa_ref, however
1238 we need to be careful in case it contains a zero sized or trailing array.
1239 These may happen after a reference to a union and in this case we must
1240 not disambiguate type punning scenarios.
1241
1242 We set:
1243 base1 to point to base
1244
1245 ref1 to point to end_of_tbaa_ref
1246
1247 end_struct_ref1 to point to the trailing reference (if it exists)
1248 in the range base....end_of_tbaa_ref
1249
1250 end_struct_past_end1 is true if this trailing reference occurs in
1251 end_of_tbaa_ref...actual_ref. */
1252 base1 = ref1;
1253 while (handled_component_p (base1))
1254 {
1255 /* Generally access paths are monotone in the size of the object. The
1256 exception is trailing arrays of structures. I.e.
1257 struct a {int array[0];};
1258 or
1259 struct a {int array1[0]; int array[];};
1260 Such struct has size 0 but accesses to a.array may have non-zero size.
1261 In this case the size of TREE_TYPE (base1) is smaller than
1262 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1263
1264 Because we compare sizes of arrays just by sizes of their elements,
1265 we only need to care about zero sized array fields here. */
1266 if (component_ref_to_zero_sized_trailing_array_p (base1))
1267 {
1268 gcc_checking_assert (!end_struct_ref1);
1269 end_struct_ref1 = base1;
1270 }
1271 if (ends_tbaa_access_path_p (base1))
1272 {
1273 ref1 = TREE_OPERAND (base1, 0);
1274 if (end_struct_ref1)
1275 {
1276 end_struct_past_end1 = true;
1277 end_struct_ref1 = NULL;
1278 }
1279 }
1280 base1 = TREE_OPERAND (base1, 0);
1281 }
1282 type1 = TREE_TYPE (base1);
1283 base2 = ref2;
1284 while (handled_component_p (base2))
1285 {
1286 if (component_ref_to_zero_sized_trailing_array_p (base2))
1287 {
1288 gcc_checking_assert (!end_struct_ref2);
1289 end_struct_ref2 = base2;
1290 }
1291 if (ends_tbaa_access_path_p (base2))
1292 {
1293 ref2 = TREE_OPERAND (base2, 0);
1294 if (end_struct_ref2)
1295 {
1296 end_struct_past_end2 = true;
1297 end_struct_ref2 = NULL;
1298 }
1299 }
1300 base2 = TREE_OPERAND (base2, 0);
1301 }
1302 type2 = TREE_TYPE (base2);
1303
1304 /* Now search for the type1 in the access path of ref2. This
1305 would be a common base for doing offset based disambiguation on.
1306 This however only makes sense if type2 is big enough to hold type1. */
1307 int cmp_outer = compare_type_sizes (type2, type1);
1308
1309 /* If type2 is big enough to contain type1 walk its access path.
1310 We also need to care of arrays at the end of structs that may extend
1311 beyond the end of structure. If this occurs in the TBAA part of the
1312 access path, we need to consider the increased type as well. */
1313 if (cmp_outer >= 0
1314 || (end_struct_ref2
1315 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
1316 {
1317 int res = aliasing_component_refs_walk (ref1, type1, base1,
1318 offset1, max_size1,
1319 end_struct_ref1,
1320 ref2, base2, offset2, max_size2,
1321 &maybe_match);
1322 if (res != -1)
1323 return res;
1324 }
1325
1326 /* If we didn't find a common base, try the other way around. */
1327 if (cmp_outer <= 0
1328 || (end_struct_ref1
1329 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type2) <= 0))
1330 {
1331 int res = aliasing_component_refs_walk (ref2, type2, base2,
1332 offset2, max_size2,
1333 end_struct_ref2,
1334 ref1, base1, offset1, max_size1,
1335 &maybe_match);
1336 if (res != -1)
1337 return res;
1338 }
1339
1340 /* In the following code we make an assumption that the types in access
1341 paths do not overlap and thus accesses alias only if one path can be
1342 a continuation of another. If we were not able to decide about
1343 equivalence, we need to give up. */
1344 if (maybe_match)
1345 {
1346 if (!nonoverlapping_component_refs_p (ref1, ref2))
1347 {
1348 ++alias_stats.aliasing_component_refs_p_may_alias;
1349 return true;
1350 }
1351 ++alias_stats.aliasing_component_refs_p_no_alias;
1352 return false;
1353 }
1354
1355 if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1356 ref1_alias_set,
1357 type2, end_struct_ref2,
1358 base2_alias_set)
1359 || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
1360 ref2_alias_set,
1361 type1, end_struct_ref1,
1362 base1_alias_set))
1363 {
1364 ++alias_stats.aliasing_component_refs_p_may_alias;
1365 return true;
1366 }
1367 ++alias_stats.aliasing_component_refs_p_no_alias;
1368 return false;
1369 }
1370
1371 /* FIELD1 and FIELD2 are two fields of component refs. We assume
1372 that bases of both component refs are either equivalent or nonoverlapping.
1373 We do not assume that the containers of FIELD1 and FIELD2 are of the
1374 same type or size.
1375
1376 Return 0 if, assuming the base addresses of the component refs are the
1377 same, FIELD1 and FIELD2 have the same address. Note that FIELD1 and
1378 FIELD2 may not be of the same type or size.
1379
1380 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1381
1382 Return -1 otherwise.
1383
1384 The main difference between 0 and -1 is to let
1385 nonoverlapping_refs_since_match_p discover the semantically
1386 equivalent part of the access path.
1387
1388 Note that this function is used even with -fno-strict-aliasing
1389 and makes use of no TBAA assumptions. */
1390
1391 static int
1392 nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1393 {
1394 /* If both fields are of the same type, we could save hard work of
1395 comparing offsets. */
1396 tree type1 = DECL_CONTEXT (field1);
1397 tree type2 = DECL_CONTEXT (field2);
1398
1399 if (TREE_CODE (type1) == RECORD_TYPE
1400 && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1401 field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1402 if (TREE_CODE (type2) == RECORD_TYPE
1403 && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1404 field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1405
1406 /* ??? Bitfields can overlap at RTL level so punt on them.
1407 FIXME: RTL expansion should be fixed by adjusting the access path
1408 when producing MEM_ATTRs for MEMs which are wider than
1409 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1410 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1411 return -1;
1412
1413 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1414 if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1415 return field1 != field2;
1416
1417 /* In the common case the offsets and bit offsets will be the same.
1418 However if frontends do not agree on the alignment, they may be
1419 different even if they actually represent the same address.
1420 Try the common case first and if that fails calculate the
1421 actual bit offset. */
1422 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1423 DECL_FIELD_OFFSET (field2))
1424 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1425 DECL_FIELD_BIT_OFFSET (field2)))
1426 return 0;
1427
1428 /* Note that it may be possible to use component_ref_field_offset
1429 which would provide offsets as trees. However constructing and folding
1430 trees is expensive and does not seem to be worth the compile time
1431 cost. */
1432
1433 poly_uint64 offset1, offset2;
1434 poly_uint64 bit_offset1, bit_offset2;
1435
1436 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
1437 && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
1438 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
1439 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
1440 {
1441 offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1442 offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1443
1444 if (known_eq (offset1, offset2))
1445 return 0;
1446
1447 poly_uint64 size1, size2;
1448
1449 if (poly_int_tree_p (DECL_SIZE (field1), &size1)
1450 && poly_int_tree_p (DECL_SIZE (field2), &size2)
1451 && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
1452 return 1;
1453 }
1454 /* Resort to slower overlap checking by looking for matching types in
1455 the middle of access path. */
1456 return -1;
1457 }
1458
1459 /* Return low bound of array. Do not produce new trees
1460 and thus do not care about particular type of integer constant
1461 and placeholder exprs. */
1462
1463 static tree
1464 cheap_array_ref_low_bound (tree ref)
1465 {
1466 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1467
1468 /* Avoid the expensive array_ref_low_bound.
1469 The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE of
1470 the domain type, or it is zero. */
1471 if (TREE_OPERAND (ref, 2))
1472 return TREE_OPERAND (ref, 2);
1473 else if (domain_type && TYPE_MIN_VALUE (domain_type))
1474 return TYPE_MIN_VALUE (domain_type);
1475 else
1476 return integer_zero_node;
1477 }
1478
1479 /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1480 completely disjoint.
1481
1482 Return 1 if the refs are non-overlapping.
1483 Return 0 if they are possibly overlapping but if so the overlap again
1484 starts on the same address.
1485 Return -1 otherwise. */
1486
1487 int
1488 nonoverlapping_array_refs_p (tree ref1, tree ref2)
1489 {
1490 tree index1 = TREE_OPERAND (ref1, 1);
1491 tree index2 = TREE_OPERAND (ref2, 1);
1492 tree low_bound1 = cheap_array_ref_low_bound (ref1);
1493 tree low_bound2 = cheap_array_ref_low_bound (ref2);
1494
1495 /* Handle zero offsets first: we do not need to match type size in this
1496 case. */
1497 if (operand_equal_p (index1, low_bound1, 0)
1498 && operand_equal_p (index2, low_bound2, 0))
1499 return 0;
1500
1501 /* If type sizes are different, give up.
1502
1503 Avoid expensive array_ref_element_size.
1504 If operand 3 is present it denotes the size in alignment units.
1505 Otherwise size is TYPE_SIZE of the element type.
1506 Handle only common cases where types are of the same "kind". */
1507 if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1508 return -1;
1509
1510 tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1511 tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1512
1513 if (TREE_OPERAND (ref1, 3))
1514 {
1515 if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1516 || !operand_equal_p (TREE_OPERAND (ref1, 3),
1517 TREE_OPERAND (ref2, 3), 0))
1518 return -1;
1519 }
1520 else
1521 {
1522 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1523 TYPE_SIZE_UNIT (elmt_type2), 0))
1524 return -1;
1525 }
1526
1527 /* Since we know that type sizes are the same, there is no need to return
1528 -1 after this point. Partial overlap can not be introduced. */
1529
1530 /* We may need to fold trees in this case.
1531 TODO: Handle integer constant case at least. */
1532 if (!operand_equal_p (low_bound1, low_bound2, 0))
1533 return 0;
1534
1535 if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1536 {
1537 if (tree_int_cst_equal (index1, index2))
1538 return 0;
1539 return 1;
1540 }
1541 /* TODO: We can use VRP to further disambiguate here. */
1542 return 0;
1543 }
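
/* For instance (illustration only), given int a[10]: a[1] and a[2] are
   disjoint (return 1); a[0] and a[0] start at the same address (return 0);
   and two ARRAY_REFs whose element sizes differ make the result unknown
   (return -1), so partial overlap must still be considered.  */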
1544
1545 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1546 MATCH2 either point to the same address or are disjoint.
1547 MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1548 respectively or NULL in the case we established equivalence of bases.
1549 If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1550 overlap by an exact multiple of their element size.
1551
1552 This test works by matching the initial segment of the access path
1553 and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1554 match was determined without use of TBAA oracle.
1555
1556 Return 1 if we can determine that component references REF1 and REF2,
1557 that are within a common DECL, cannot overlap.
1558
1559 Return 0 if the paths are the same and thus there is nothing more to
1560 disambiguate (i.e. there is a must-alias assuming there is a must-alias
1561 between MATCH1 and MATCH2).
1562
1563 Return -1 if we can not determine 0 or 1 - this happens when
1564 non-matching types were met in the path.
1565 In this case it may make sense to continue by other disambiguation
1566 oracles. */
1567
1568 static int
1569 nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1570 tree match2, tree ref2,
1571 bool partial_overlap)
1572 {
1573 int ntbaa1 = 0, ntbaa2 = 0;
1574 /* Early return if there are no references to match, we do not need
1575 to walk the access paths.
1576
1577 Do not consider this as may-alias for stats - it is more useful
1578 to have information how many disambiguations happened provided that
1579 the query was meaningful. */
1580
1581 if (match1 == ref1 || !handled_component_p (ref1)
1582 || match2 == ref2 || !handled_component_p (ref2))
1583 return -1;
1584
1585 auto_vec<tree, 16> component_refs1;
1586 auto_vec<tree, 16> component_refs2;
1587
1588 /* Create the stack of handled components for REF1. */
1589 while (handled_component_p (ref1) && ref1 != match1)
1590 {
1591 /* We use TBAA only to re-synchronize after mismatched refs. So we
1592 do not need to truncate access path after TBAA part ends. */
1593 if (ends_tbaa_access_path_p (ref1))
1594 ntbaa1 = 0;
1595 else
1596 ntbaa1++;
1597 component_refs1.safe_push (ref1);
1598 ref1 = TREE_OPERAND (ref1, 0);
1599 }
1600
1601 /* Create the stack of handled components for REF2. */
1602 while (handled_component_p (ref2) && ref2 != match2)
1603 {
1604 if (ends_tbaa_access_path_p (ref2))
1605 ntbaa2 = 0;
1606 else
1607 ntbaa2++;
1608 component_refs2.safe_push (ref2);
1609 ref2 = TREE_OPERAND (ref2, 0);
1610 }
1611
1612 if (!flag_strict_aliasing)
1613 {
1614 ntbaa1 = 0;
1615 ntbaa2 = 0;
1616 }
1617
1618 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1619 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1620
1621 /* If only one of the access paths starts with MEM_REF, check that the offset
1622 is 0 so the addresses stay the same after stripping it.
1623 TODO: In this case we may walk the other access path until we get the same
1624 offset.
1625 
1626 If both start with a MEM_REF, the offsets have to be the same. */
1627 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1628 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1629 || (mem_ref1 && mem_ref2
1630 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1631 TREE_OPERAND (ref2, 1))))
1632 {
1633 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1634 return -1;
1635 }
1636
1637 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1638 to handle them here at all. */
1639 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1640 && TREE_CODE (ref2) != TARGET_MEM_REF);
1641
1642 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1643 rank. This is sufficient because we start from the same DECL and you
1644 cannot reference several fields at a time with COMPONENT_REFs (unlike
1645 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1646 of them to access a sub-component, unless you're in a union, in which
1647 case the return value will precisely be false. */
1648 while (true)
1649 {
1650 /* Track whether we have seen an unmatched ref with a non-zero offset. In
1651 this case we must look for partial overlaps. */
1652 bool seen_unmatched_ref_p = false;
1653
1654 /* First match ARRAY_REFs and try to disambiguate. */
1655 if (!component_refs1.is_empty ()
1656 && !component_refs2.is_empty ())
1657 {
1658 unsigned int narray_refs1=0, narray_refs2=0;
1659
1660 /* We generally assume that both access paths start with the same sequence
1661 of refs. However if the number of array refs is not in sync, try
1662 to recover and pop elts until the numbers match. This helps the case
1663 where one access path starts with an array and the other with an element. */
1664 for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1665 narray_refs1++)
1666 if (TREE_CODE (component_refs1 [component_refs1.length()
1667 - 1 - narray_refs1]) != ARRAY_REF)
1668 break;
1669
1670 for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1671 narray_refs2++)
1672 if (TREE_CODE (component_refs2 [component_refs2.length()
1673 - 1 - narray_refs2]) != ARRAY_REF)
1674 break;
1675 for (; narray_refs1 > narray_refs2; narray_refs1--)
1676 {
1677 ref1 = component_refs1.pop ();
1678 ntbaa1--;
1679
1680 /* If index is non-zero we need to check whether the reference
1681 does not break the main invariant that bases are either
1682 disjoint or equal. Consider the example:
1683
1684 unsigned char out[][1];
1685 out[1]="a";
1686 out[i][0];
1687
1688 Here the bases out and out are the same, but after removing the
1689 [i] index, this invariant no longer holds, because
1690 out[i] points to the middle of array out.
1691
1692 TODO: If the size of the type of the skipped reference is an integer
1693 multiple of the size of the type of the other reference this
1694 invariant can be verified, but even then it is not completely
1695 safe with !flag_strict_aliasing if the other reference contains
1696 unbounded array accesses.
1697 See */
1698
1699 if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1700 cheap_array_ref_low_bound (ref1), 0))
1701 return 0;
1702 }
1703 for (; narray_refs2 > narray_refs1; narray_refs2--)
1704 {
1705 ref2 = component_refs2.pop ();
1706 ntbaa2--;
1707 if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1708 cheap_array_ref_low_bound (ref2), 0))
1709 return 0;
1710 }
1711 /* Try to disambiguate matched arrays. */
1712 for (unsigned int i = 0; i < narray_refs1; i++)
1713 {
1714 int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1715 component_refs2.pop ());
1716 ntbaa1--;
1717 ntbaa2--;
1718 if (cmp == 1 && !partial_overlap)
1719 {
1720 ++alias_stats
1721 .nonoverlapping_refs_since_match_p_no_alias;
1722 return 1;
1723 }
1724 if (cmp == -1)
1725 {
1726 seen_unmatched_ref_p = true;
1727 /* We cannot maintain the invariant that bases are either the
1728 same or completely disjoint. However we can still recover
1729 via type-based alias analysis if we reach references of the
1730 same sizes. We do not attempt to match array sizes, so
1731 just finish the array walk and look for component refs. */
1732 if (ntbaa1 < 0 || ntbaa2 < 0)
1733 {
1734 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1735 return -1;
1736 }
1737 for (i++; i < narray_refs1; i++)
1738 {
1739 component_refs1.pop ();
1740 component_refs2.pop ();
1741 ntbaa1--;
1742 ntbaa2--;
1743 }
1744 break;
1745 }
1746 partial_overlap = false;
1747 }
1748 }
1749
1750 /* Next look for component_refs. */
1751 do
1752 {
1753 if (component_refs1.is_empty ())
1754 {
1755 ++alias_stats
1756 .nonoverlapping_refs_since_match_p_must_overlap;
1757 return 0;
1758 }
1759 ref1 = component_refs1.pop ();
1760 ntbaa1--;
1761 if (TREE_CODE (ref1) != COMPONENT_REF)
1762 {
1763 seen_unmatched_ref_p = true;
1764 if (ntbaa1 < 0 || ntbaa2 < 0)
1765 {
1766 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1767 return -1;
1768 }
1769 }
1770 }
1771 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1772
1773 do
1774 {
1775 if (component_refs2.is_empty ())
1776 {
1777 ++alias_stats
1778 .nonoverlapping_refs_since_match_p_must_overlap;
1779 return 0;
1780 }
1781 ref2 = component_refs2.pop ();
1782 ntbaa2--;
1783 if (TREE_CODE (ref2) != COMPONENT_REF)
1784 {
1785 if (ntbaa1 < 0 || ntbaa2 < 0)
1786 {
1787 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1788 return -1;
1789 }
1790 seen_unmatched_ref_p = true;
1791 }
1792 }
1793 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1794
1795 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1796 earlier. */
1797 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1798 && TREE_CODE (ref2) == COMPONENT_REF);
1799
1800 tree field1 = TREE_OPERAND (ref1, 1);
1801 tree field2 = TREE_OPERAND (ref2, 1);
1802
1803 /* ??? We cannot simply use the type of operand #0 of the refs here
1804 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1805 for common blocks instead of using unions like everyone else. */
1806 tree type1 = DECL_CONTEXT (field1);
1807 tree type2 = DECL_CONTEXT (field2);
1808
1809 partial_overlap = false;
1810
1811 /* If we skipped array refs on types of different sizes, we can
1812 no longer be sure that there are no partial overlaps. */
1813 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1814 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1815 {
1816 ++alias_stats
1817 .nonoverlapping_refs_since_match_p_may_alias;
1818 return -1;
1819 }
1820
1821 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1822 if (cmp == -1)
1823 {
1824 ++alias_stats
1825 .nonoverlapping_refs_since_match_p_may_alias;
1826 return -1;
1827 }
1828 else if (cmp == 1)
1829 {
1830 ++alias_stats
1831 .nonoverlapping_refs_since_match_p_no_alias;
1832 return 1;
1833 }
1834 }
1835 }
1836
1837 /* Return TYPE_UID which can be used to match record types we consider
1838 the same for TBAA purposes. */
1839
1840 static inline int
1841 ncr_type_uid (const_tree field)
1842 {
1843 /* ??? We cannot simply use the type of operand #0 of the refs here
1844 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1845 for common blocks instead of using unions like everyone else. */
1846 tree type = DECL_FIELD_CONTEXT (field);
1847 /* With LTO, types considered same_type_for_tbaa_p
1848 from different translation units may not have the same
1849 main variant. They do however have the same TYPE_CANONICAL. */
1850 if (TYPE_CANONICAL (type))
1851 return TYPE_UID (TYPE_CANONICAL (type));
1852 return TYPE_UID (type);
1853 }
1854
1855 /* qsort compare function to sort FIELD_DECLs by their
1856 DECL_FIELD_CONTEXT TYPE_UID. */
1857
1858 static inline int
1859 ncr_compar (const void *field1_, const void *field2_)
1860 {
1861 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1862 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1863 unsigned int uid1 = ncr_type_uid (field1);
1864 unsigned int uid2 = ncr_type_uid (field2);
1865
1866 if (uid1 < uid2)
1867 return -1;
1868 else if (uid1 > uid2)
1869 return 1;
1870 return 0;
1871 }
1872
1873 /* Return true if we can determine that the fields referenced cannot
1874 overlap for any pair of objects. This relies on TBAA. */
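/* Illustrative sketch (an assumed example, not from the sources): with

     struct S { int f; int g; };  struct S *p, *q;

   the accesses p->f and q->g collect the FIELD_DECLs f and g whose
   DECL_FIELD_CONTEXT is the same RECORD_TYPE.  Under strict aliasing any
   two objects of type S are either identical or disjoint, and f and g do
   not overlap within S, so the accesses cannot overlap even though p and
   q are unrelated pointers.  */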
1875
1876 static bool
1877 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1878 {
1879 /* Early return if we have nothing to do.
1880 
1881 Do not consider this as may-alias for the statistics - it is more useful
1882 to know how many disambiguations happened provided that
1883 the query was meaningful. */
1884 if (!flag_strict_aliasing
1885 || !x || !y
1886 || !handled_component_p (x)
1887 || !handled_component_p (y))
1888 return false;
1889
1890 auto_vec<const_tree, 16> fieldsx;
1891 while (handled_component_p (x))
1892 {
1893 if (TREE_CODE (x) == COMPONENT_REF)
1894 {
1895 tree field = TREE_OPERAND (x, 1);
1896 tree type = DECL_FIELD_CONTEXT (field);
1897 if (TREE_CODE (type) == RECORD_TYPE)
1898 fieldsx.safe_push (field);
1899 }
1900 else if (ends_tbaa_access_path_p (x))
1901 fieldsx.truncate (0);
1902 x = TREE_OPERAND (x, 0);
1903 }
1904 if (fieldsx.length () == 0)
1905 return false;
1906 auto_vec<const_tree, 16> fieldsy;
1907 while (handled_component_p (y))
1908 {
1909 if (TREE_CODE (y) == COMPONENT_REF)
1910 {
1911 tree field = TREE_OPERAND (y, 1);
1912 tree type = DECL_FIELD_CONTEXT (field);
1913 if (TREE_CODE (type) == RECORD_TYPE)
1914 fieldsy.safe_push (TREE_OPERAND (y, 1));
1915 }
1916 else if (ends_tbaa_access_path_p (y))
1917 fieldsy.truncate (0);
1918 y = TREE_OPERAND (y, 0);
1919 }
1920 if (fieldsy.length () == 0)
1921 {
1922 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1923 return false;
1924 }
1925
1926 /* Most common case first. */
1927 if (fieldsx.length () == 1
1928 && fieldsy.length () == 1)
1929 {
1930 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1931 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1932 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1933 {
1934 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1935 return true;
1936 }
1937 else
1938 {
1939 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1940 return false;
1941 }
1942 }
1943
1944 if (fieldsx.length () == 2)
1945 {
1946 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1947 std::swap (fieldsx[0], fieldsx[1]);
1948 }
1949 else
1950 fieldsx.qsort (ncr_compar);
1951
1952 if (fieldsy.length () == 2)
1953 {
1954 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1955 std::swap (fieldsy[0], fieldsy[1]);
1956 }
1957 else
1958 fieldsy.qsort (ncr_compar);
1959
1960 unsigned i = 0, j = 0;
1961 do
1962 {
1963 const_tree fieldx = fieldsx[i];
1964 const_tree fieldy = fieldsy[j];
1965
1966 /* We're left with accessing different fields of a structure,
1967 no possible overlap. */
1968 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1969 DECL_FIELD_CONTEXT (fieldy)) == 1
1970 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1971 {
1972 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1973 return true;
1974 }
1975
1976 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1977 {
1978 i++;
1979 if (i == fieldsx.length ())
1980 break;
1981 }
1982 else
1983 {
1984 j++;
1985 if (j == fieldsy.length ())
1986 break;
1987 }
1988 }
1989 while (1);
1990
1991 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1992 return false;
1993 }
1994
1995
1996 /* Return true if two memory references based on the variables BASE1
1997 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1998 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1999 if non-NULL are the complete memory reference trees. */
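/* For instance (an assumed example): with "int a[16], b[16];" the
   references a[i] and b[j] are based on different DECLs and thus cannot
   alias, while a[2] and a[5] share the base "a" but their offset ranges
   do not overlap, so both queries are expected to return false.  */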
2000
2001 static bool
2002 decl_refs_may_alias_p (tree ref1, tree base1,
2003 poly_int64 offset1, poly_int64 max_size1,
2004 poly_int64 size1,
2005 tree ref2, tree base2,
2006 poly_int64 offset2, poly_int64 max_size2,
2007 poly_int64 size2)
2008 {
2009 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2010
2011 /* If the two references are based on different variables, they cannot alias. */
2012 if (compare_base_decls (base1, base2) == 0)
2013 return false;
2014
2015 /* If both references are based on the same variable, they cannot alias if
2016 the accesses do not overlap. */
2017 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2018 return false;
2019
2020 /* If there is must alias, there is no use disambiguating further. */
2021 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2022 return true;
2023
2024 /* For components with variable position, the above test isn't sufficient,
2025 so we disambiguate component references manually. */
2026 if (ref1 && ref2
2027 && handled_component_p (ref1) && handled_component_p (ref2)
2028 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
2029 return false;
2030
2031 return true;
2032 }
2033
2034 /* Return true if the access with BASE is view-converted.
2035 BASE must not have had its inner MEM_REF (&decl) stripped,
2036 as ao_ref_base does, and thus one extra walk
2037 of handled components is needed. */
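/* Schematically (an assumed example): an access such as
   MEM <v4si> [(int *)p_1] reads a vector type through a pointer that is
   annotated as pointing to int; the access type and the pointed-to type
   of the MEM_REF's second operand differ for TBAA, so the access counts
   as view-converted.  */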
2038
2039 static bool
2040 view_converted_memref_p (tree base)
2041 {
2042 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2043 return false;
2044 return same_type_for_tbaa (TREE_TYPE (base),
2045 TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1)))) != 1;
2046 }
2047
2048 /* Return true if an indirect reference based on *PTR1 constrained
2049 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2050 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2051 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2052 in which case they are computed on-demand. REF1 and REF2
2053 if non-NULL are the complete memory reference trees. */
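/* For example (a hedged sketch): for an access *p (an indirect base) and
   a local variable x (a decl base), the two cannot alias when the
   points-to set of p does not contain x, when the pointer-based access
   lies entirely beyond the accessed extent of x (the pointer cannot
   validly point before the start of the decl), or when the access through
   p is known to be larger than x itself.  */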
2054
2055 static bool
2056 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2057 poly_int64 offset1, poly_int64 max_size1,
2058 poly_int64 size1,
2059 alias_set_type ref1_alias_set,
2060 alias_set_type base1_alias_set,
2061 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2062 poly_int64 offset2, poly_int64 max_size2,
2063 poly_int64 size2,
2064 alias_set_type ref2_alias_set,
2065 alias_set_type base2_alias_set, bool tbaa_p)
2066 {
2067 tree ptr1;
2068 tree ptrtype1, dbase2;
2069
2070 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2071 || TREE_CODE (base1) == TARGET_MEM_REF)
2072 && DECL_P (base2));
2073
2074 ptr1 = TREE_OPERAND (base1, 0);
2075 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2076
2077 /* If only one reference is based on a variable, they cannot alias if
2078 the pointer access is beyond the extent of the variable access.
2079 (the pointer base cannot validly point to an offset less than zero
2080 of the variable).
2081 ??? IVOPTs creates bases that do not honor this restriction,
2082 so do not apply this optimization for TARGET_MEM_REFs. */
2083 if (TREE_CODE (base1) != TARGET_MEM_REF
2084 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2085 return false;
2086
2087 /* If the pointer based access is bigger than the variable they cannot
2088 alias. This is similar to the check below where we use TBAA to
2089 increase the size of the pointer based access based on the dynamic
2090 type of a containing object we can infer from it. */
2091 poly_int64 dsize2;
2092 if (known_size_p (size1)
2093 && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
2094 && known_lt (dsize2, size1))
2095 return false;
2096
2097 /* They also cannot alias if the pointer may not point to the decl. */
2098 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2099 return false;
2100
2101 /* Disambiguations that rely on strict aliasing rules follow. */
2102 if (!flag_strict_aliasing || !tbaa_p)
2103 return true;
2104
2105 /* If the alias set for a pointer access is zero all bets are off. */
2106 if (base1_alias_set == 0 || base2_alias_set == 0)
2107 return true;
2108
2109 /* When we are trying to disambiguate an access with a pointer dereference
2110 as base versus one with a decl as base we can use both the size
2111 of the decl and its dynamic type for extra disambiguation.
2112 ??? We do not know anything about the dynamic type of the decl
2113 other than that its alias-set contains base2_alias_set as a subset
2114 which does not help us here. */
2115 /* As we know nothing useful about the dynamic type of the decl just
2116 use the usual conflict check rather than a subset test.
2117 ??? We could introduce -fvery-strict-aliasing when the language
2118 does not allow decls to have a dynamic type that differs from their
2119 static type. Then we can check
2120 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2121 if (base1_alias_set != base2_alias_set
2122 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2123 return false;
2124
2125 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2126
2127 /* If the size of the access relevant for TBAA through the pointer
2128 is bigger than the size of the decl we can't possibly access the
2129 decl via that pointer. */
2130 if (/* ??? This in turn may run afoul when a decl of type T which is
2131 a member of union type U is accessed through a pointer to
2132 type U and sizeof T is smaller than sizeof U. */
2133 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2134 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2135 && compare_sizes (DECL_SIZE (base2),
2136 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2137 return false;
2138
2139 if (!ref2)
2140 return true;
2141
2142 /* If the decl is accessed via a MEM_REF, reconstruct the base
2143 we can use for TBAA and an appropriately adjusted offset. */
2144 dbase2 = ref2;
2145 while (handled_component_p (dbase2))
2146 dbase2 = TREE_OPERAND (dbase2, 0);
2147 poly_int64 doffset1 = offset1;
2148 poly_offset_int doffset2 = offset2;
2149 if (TREE_CODE (dbase2) == MEM_REF
2150 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2151 {
2152 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2153 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2154 /* If second reference is view-converted, give up now. */
2155 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2156 return true;
2157 }
2158
2159 /* If first reference is view-converted, give up now. */
2160 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2161 return true;
2162
2163 /* If both references are through the same type, they do not alias
2164 if the accesses do not overlap. This does extra disambiguation
2165 for mixed/pointer accesses but requires strict aliasing.
2166 For MEM_REFs we require that the component-ref offset we computed
2167 is relative to the start of the type which we ensure by
2168 comparing rvalue and access type and disregarding the constant
2169 pointer offset.
2170
2171 But avoid treating variable length arrays as "objects", instead assume they
2172 can overlap by an exact multiple of their element size.
2173 See gcc.dg/torture/alias-2.c. */
2174 if (((TREE_CODE (base1) != TARGET_MEM_REF
2175 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2176 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2177 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2178 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2179 {
2180 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2181 && (TYPE_SIZE (TREE_TYPE (base1))
2182 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2183 != INTEGER_CST));
2184 if (!partial_overlap
2185 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2186 return false;
2187 if (!ref1 || !ref2
2188 /* If there is must alias, there is no use disambiguating further. */
2189 || (!partial_overlap
2190 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2191 return true;
2192 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2193 partial_overlap);
2194 if (res == -1)
2195 return !nonoverlapping_component_refs_p (ref1, ref2);
2196 return !res;
2197 }
2198
2199 /* Do access-path based disambiguation. */
2200 if (ref1 && ref2
2201 && (handled_component_p (ref1) || handled_component_p (ref2)))
2202 return aliasing_component_refs_p (ref1,
2203 ref1_alias_set, base1_alias_set,
2204 offset1, max_size1,
2205 ref2,
2206 ref2_alias_set, base2_alias_set,
2207 offset2, max_size2);
2208
2209 return true;
2210 }
2211
2212 /* Return true if two indirect references based on *PTR1
2213 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2214 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2215 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2216 in which case they are computed on-demand. REF1 and REF2
2217 if non-NULL are the complete memory reference trees. */
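/* For example (a hedged sketch): the accesses *p and *q cannot alias when
   the points-to sets of p and q do not intersect; when p and q are the
   same SSA name, only the accessed offset ranges relative to the common
   pointer decide, as for p->f versus p->g.  */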
2218
2219 static bool
2220 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2221 poly_int64 offset1, poly_int64 max_size1,
2222 poly_int64 size1,
2223 alias_set_type ref1_alias_set,
2224 alias_set_type base1_alias_set,
2225 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2226 poly_int64 offset2, poly_int64 max_size2,
2227 poly_int64 size2,
2228 alias_set_type ref2_alias_set,
2229 alias_set_type base2_alias_set, bool tbaa_p)
2230 {
2231 tree ptr1;
2232 tree ptr2;
2233 tree ptrtype1, ptrtype2;
2234
2235 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2236 || TREE_CODE (base1) == TARGET_MEM_REF)
2237 && (TREE_CODE (base2) == MEM_REF
2238 || TREE_CODE (base2) == TARGET_MEM_REF));
2239
2240 ptr1 = TREE_OPERAND (base1, 0);
2241 ptr2 = TREE_OPERAND (base2, 0);
2242
2243 /* If both bases are based on pointers they cannot alias if they may not
2244 point to the same memory object or if they point to the same object
2245 and the accesses do not overlap. */
2246 if ((!cfun || gimple_in_ssa_p (cfun))
2247 && operand_equal_p (ptr1, ptr2, 0)
2248 && (((TREE_CODE (base1) != TARGET_MEM_REF
2249 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2250 && (TREE_CODE (base2) != TARGET_MEM_REF
2251 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2252 || (TREE_CODE (base1) == TARGET_MEM_REF
2253 && TREE_CODE (base2) == TARGET_MEM_REF
2254 && (TMR_STEP (base1) == TMR_STEP (base2)
2255 || (TMR_STEP (base1) && TMR_STEP (base2)
2256 && operand_equal_p (TMR_STEP (base1),
2257 TMR_STEP (base2), 0)))
2258 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2259 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2260 && operand_equal_p (TMR_INDEX (base1),
2261 TMR_INDEX (base2), 0)))
2262 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2263 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2264 && operand_equal_p (TMR_INDEX2 (base1),
2265 TMR_INDEX2 (base2), 0))))))
2266 {
2267 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2268 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2269 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2270 offset2 + moff2, max_size2))
2271 return false;
2272 /* If there is must alias, there is no use disambiguating further. */
2273 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2274 return true;
2275 if (ref1 && ref2)
2276 {
2277 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2278 false);
2279 if (res != -1)
2280 return !res;
2281 }
2282 }
2283 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2284 return false;
2285
2286 /* Disambiguations that rely on strict aliasing rules follow. */
2287 if (!flag_strict_aliasing || !tbaa_p)
2288 return true;
2289
2290 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2291 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2292
2293 /* If the alias set for a pointer access is zero all bets are off. */
2294 if (base1_alias_set == 0
2295 || base2_alias_set == 0)
2296 return true;
2297
2298 /* Do type-based disambiguation. */
2299 if (base1_alias_set != base2_alias_set
2300 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2301 return false;
2302
2303 /* If either reference is view-converted, give up now. */
2304 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2305 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2306 return true;
2307
2308 /* If both references are through the same type, they do not alias
2309 if the accesses do not overlap. This does extra disambiguation
2310 for mixed/pointer accesses but requires strict aliasing. */
2311 if ((TREE_CODE (base1) != TARGET_MEM_REF
2312 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2313 && (TREE_CODE (base2) != TARGET_MEM_REF
2314 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2315 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2316 TREE_TYPE (ptrtype2)) == 1)
2317 {
2318 /* But avoid treating arrays as "objects", instead assume they
2319 can overlap by an exact multiple of their element size.
2320 See gcc.dg/torture/alias-2.c. */
2321 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2322
2323 if (!partial_overlap
2324 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2325 return false;
2326 if (!ref1 || !ref2
2327 || (!partial_overlap
2328 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2329 return true;
2330 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2331 partial_overlap);
2332 if (res == -1)
2333 return !nonoverlapping_component_refs_p (ref1, ref2);
2334 return !res;
2335 }
2336
2337 /* Do access-path based disambiguation. */
2338 if (ref1 && ref2
2339 && (handled_component_p (ref1) || handled_component_p (ref2)))
2340 return aliasing_component_refs_p (ref1,
2341 ref1_alias_set, base1_alias_set,
2342 offset1, max_size1,
2343 ref2,
2344 ref2_alias_set, base2_alias_set,
2345 offset2, max_size2);
2346
2347 return true;
2348 }
2349
2350 /* Return true if the two memory references REF1 and REF2 may alias. */
2351
2352 static bool
2353 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2354 {
2355 tree base1, base2;
2356 poly_int64 offset1 = 0, offset2 = 0;
2357 poly_int64 max_size1 = -1, max_size2 = -1;
2358 bool var1_p, var2_p, ind1_p, ind2_p;
2359
2360 gcc_checking_assert ((!ref1->ref
2361 || TREE_CODE (ref1->ref) == SSA_NAME
2362 || DECL_P (ref1->ref)
2363 || TREE_CODE (ref1->ref) == STRING_CST
2364 || handled_component_p (ref1->ref)
2365 || TREE_CODE (ref1->ref) == MEM_REF
2366 || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2367 || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2368 && (!ref2->ref
2369 || TREE_CODE (ref2->ref) == SSA_NAME
2370 || DECL_P (ref2->ref)
2371 || TREE_CODE (ref2->ref) == STRING_CST
2372 || handled_component_p (ref2->ref)
2373 || TREE_CODE (ref2->ref) == MEM_REF
2374 || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2375 || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2376
2377 /* Decompose the references into their base objects and the access. */
2378 base1 = ao_ref_base (ref1);
2379 offset1 = ref1->offset;
2380 max_size1 = ref1->max_size;
2381 base2 = ao_ref_base (ref2);
2382 offset2 = ref2->offset;
2383 max_size2 = ref2->max_size;
2384
2385 /* We can end up with registers or constants as bases for example from
2386 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2387 which is seen as a struct copy. */
2388 if (TREE_CODE (base1) == SSA_NAME
2389 || TREE_CODE (base1) == CONST_DECL
2390 || TREE_CODE (base1) == CONSTRUCTOR
2391 || TREE_CODE (base1) == ADDR_EXPR
2392 || CONSTANT_CLASS_P (base1)
2393 || TREE_CODE (base2) == SSA_NAME
2394 || TREE_CODE (base2) == CONST_DECL
2395 || TREE_CODE (base2) == CONSTRUCTOR
2396 || TREE_CODE (base2) == ADDR_EXPR
2397 || CONSTANT_CLASS_P (base2))
2398 return false;
2399
2400 /* Two volatile accesses always conflict. */
2401 if (ref1->volatile_p
2402 && ref2->volatile_p)
2403 return true;
2404
2405 /* refN->ref may convey size information; do not confuse our workers
2406 with that but strip it - ao_ref_base took it into account already. */
2407 tree ref1ref = ref1->ref;
2408 if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2409 ref1ref = TREE_OPERAND (ref1ref, 0);
2410 tree ref2ref = ref2->ref;
2411 if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2412 ref2ref = TREE_OPERAND (ref2ref, 0);
2413
2414 /* Defer to simple offset-based disambiguation if we have
2415 references based on two decls. Do this before deferring to
2416 TBAA to handle must-alias cases in conformance with the
2417 GCC extension of allowing type-punning through unions. */
2418 var1_p = DECL_P (base1);
2419 var2_p = DECL_P (base2);
2420 if (var1_p && var2_p)
2421 return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
2422 ref1->size,
2423 ref2ref, base2, offset2, max_size2,
2424 ref2->size);
2425
2426 /* We can end up referring to code via function and label decls.
2427 As we likely do not properly track code aliases, conservatively
2428 bail out. */
2429 if (TREE_CODE (base1) == FUNCTION_DECL
2430 || TREE_CODE (base1) == LABEL_DECL
2431 || TREE_CODE (base2) == FUNCTION_DECL
2432 || TREE_CODE (base2) == LABEL_DECL)
2433 return true;
2434
2435 /* Handle restrict based accesses.
2436 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2437 here. */
2438 tree rbase1 = base1;
2439 tree rbase2 = base2;
2440 if (var1_p)
2441 {
2442 rbase1 = ref1ref;
2443 if (rbase1)
2444 while (handled_component_p (rbase1))
2445 rbase1 = TREE_OPERAND (rbase1, 0);
2446 }
2447 if (var2_p)
2448 {
2449 rbase2 = ref2ref;
2450 if (rbase2)
2451 while (handled_component_p (rbase2))
2452 rbase2 = TREE_OPERAND (rbase2, 0);
2453 }
2454 if (rbase1 && rbase2
2455 && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
2456 && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
2457 /* If the accesses are in the same restrict clique... */
2458 && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
2459 /* But based on different pointers they do not alias. */
2460 && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
2461 return false;
2462
2463 ind1_p = (TREE_CODE (base1) == MEM_REF
2464 || TREE_CODE (base1) == TARGET_MEM_REF);
2465 ind2_p = (TREE_CODE (base2) == MEM_REF
2466 || TREE_CODE (base2) == TARGET_MEM_REF);
2467
2468 /* Canonicalize the pointer-vs-decl case. */
2469 if (ind1_p && var2_p)
2470 {
2471 std::swap (offset1, offset2);
2472 std::swap (max_size1, max_size2);
2473 std::swap (base1, base2);
2474 std::swap (ref1, ref2);
2475 std::swap (ref1ref, ref2ref);
2476 var1_p = true;
2477 ind1_p = false;
2478 var2_p = false;
2479 ind2_p = true;
2480 }
2481
2482 /* First defer to TBAA if possible. */
2483 if (tbaa_p
2484 && flag_strict_aliasing
2485 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2486 ao_ref_alias_set (ref2)))
2487 return false;
2488
2489 /* If the reference is based on a pointer that points to memory
2490 that may not be written to then the other reference cannot possibly
2491 clobber it. */
2492 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2493 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2494 || (ind1_p
2495 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2496 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2497 return false;
2498
2499 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2500 if (var1_p && ind2_p)
2501 return indirect_ref_may_alias_decl_p (ref2ref, base2,
2502 offset2, max_size2, ref2->size,
2503 ao_ref_alias_set (ref2),
2504 ao_ref_base_alias_set (ref2),
2505 ref1ref, base1,
2506 offset1, max_size1, ref1->size,
2507 ao_ref_alias_set (ref1),
2508 ao_ref_base_alias_set (ref1),
2509 tbaa_p);
2510 else if (ind1_p && ind2_p)
2511 return indirect_refs_may_alias_p (ref1ref, base1,
2512 offset1, max_size1, ref1->size,
2513 ao_ref_alias_set (ref1),
2514 ao_ref_base_alias_set (ref1),
2515 ref2ref, base2,
2516 offset2, max_size2, ref2->size,
2517 ao_ref_alias_set (ref2),
2518 ao_ref_base_alias_set (ref2),
2519 tbaa_p);
2520
2521 gcc_unreachable ();
2522 }
2523
2524 /* Return true if the two memory references REF1 and REF2 may alias,
2525 and update the statistics. */
2526
2527 bool
2528 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2529 {
2530 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2531 if (res)
2532 ++alias_stats.refs_may_alias_p_may_alias;
2533 else
2534 ++alias_stats.refs_may_alias_p_no_alias;
2535 return res;
2536 }
2537
2538 static bool
2539 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2540 {
2541 ao_ref r1;
2542 ao_ref_init (&r1, ref1);
2543 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2544 }
2545
2546 bool
2547 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2548 {
2549 ao_ref r1, r2;
2550 ao_ref_init (&r1, ref1);
2551 ao_ref_init (&r2, ref2);
2552 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2553 }
2554
2555 /* Returns true if there is an anti-dependence for the STORE that
2556 executes after the LOAD. */
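/* For instance (a hedged example): a read of *p_1 followed later by a
   write to *q_2 is anti-dependent unless the oracle can prove the two
   references do not alias; note the query deliberately does not use
   TBAA.  */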
2557
2558 bool
2559 refs_anti_dependent_p (tree load, tree store)
2560 {
2561 ao_ref r1, r2;
2562 ao_ref_init (&r1, load);
2563 ao_ref_init (&r2, store);
2564 return refs_may_alias_p_1 (&r1, &r2, false);
2565 }
2566
2567 /* Returns true if there is an output dependence for the stores
2568 STORE1 and STORE2. */
2569
2570 bool
2571 refs_output_dependent_p (tree store1, tree store2)
2572 {
2573 ao_ref r1, r2;
2574 ao_ref_init (&r1, store1);
2575 ao_ref_init (&r2, store2);
2576 return refs_may_alias_p_1 (&r1, &r2, false);
2577 }
2578
2579 /* Returns true if and only if REF may alias any access stored in TT.
2580 If TBAA_P is true, use the TBAA oracle. */
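/* For example (a hedged sketch): if the modref summary TT of the callee
   records that the callee only loads or stores through its first pointer
   argument with given base/ref alias sets, then REF conflicts only when
   TBAA does not rule those alias sets out and REF may alias the memory
   pointed to by that actual argument of STMT.  */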
2581
2582 static bool
2583 modref_may_conflict (const gcall *stmt,
2584 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2585 {
2586 alias_set_type base_set, ref_set;
2587 bool global_memory_ok = false;
2588
2589 if (tt->every_base)
2590 return true;
2591
2592 if (!dbg_cnt (ipa_mod_ref))
2593 return true;
2594
2595 base_set = ao_ref_base_alias_set (ref);
2596
2597 ref_set = ao_ref_alias_set (ref);
2598
2599 int num_tests = 0, max_tests = param_modref_max_tests;
2600 for (auto base_node : tt->bases)
2601 {
2602 if (tbaa_p && flag_strict_aliasing)
2603 {
2604 if (num_tests >= max_tests)
2605 return true;
2606 alias_stats.modref_tests++;
2607 if (!alias_sets_conflict_p (base_set, base_node->base))
2608 continue;
2609 num_tests++;
2610 }
2611
2612 if (base_node->every_ref)
2613 return true;
2614
2615 for (auto ref_node : base_node->refs)
2616 {
2617 /* Do not repeat same test as before. */
2618 if ((ref_set != base_set || base_node->base != ref_node->ref)
2619 && tbaa_p && flag_strict_aliasing)
2620 {
2621 if (num_tests >= max_tests)
2622 return true;
2623 alias_stats.modref_tests++;
2624 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2625 continue;
2626 num_tests++;
2627 }
2628
2629 if (ref_node->every_access)
2630 return true;
2631
2632 /* TBAA checks did not disambiguate, try individual accesses. */
2633 for (auto access_node : ref_node->accesses)
2634 {
2635 if (num_tests >= max_tests)
2636 return true;
2637
2638 if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
2639 {
2640 if (global_memory_ok)
2641 continue;
2642 if (ref_may_alias_global_p (ref, true))
2643 return true;
2644 global_memory_ok = true;
2645 num_tests++;
2646 continue;
2647 }
2648
2649 tree arg = access_node.get_call_arg (stmt);
2650 if (!arg)
2651 return true;
2652
2653 alias_stats.modref_baseptr_tests++;
2654
2655 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2656 continue;
2657
2658 /* The PTA oracle will be unhappy if arg is not a pointer. */
2659 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2660 return true;
2661
2662 /* If we don't have a base pointer, give up. */
2663 if (!ref->ref && !ref->base)
2664 continue;
2665
2666 ao_ref ref2;
2667 if (access_node.get_ao_ref (stmt, &ref2))
2668 {
2669 ref2.ref_alias_set = ref_node->ref;
2670 ref2.base_alias_set = base_node->base;
2671 if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
2672 return true;
2673 }
2674 else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2675 return true;
2676
2677 num_tests++;
2678 }
2679 }
2680 }
2681 return false;
2682 }
2683
2684 /* Check if REF conflicts with the call using the "fn spec" attribute.
2685 If CLOBBER is true we are checking for writes, otherwise check loads.
2686 
2687 Return 0 if there are no conflicts (except for possible function call
2688 argument reads), 1 if there are conflicts and -1 if we cannot decide by
2689 fn spec. */
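/* For instance (a hedged example): if the fnspec of CALL says that global
   memory is not written and only its first argument may be written, then
   a clobber conflict with REF is only possible through that argument,
   bounded by the access size the fnspec specifies (and, for clobbers,
   possibly through errno when flag_errno_math is on).  */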
2690
2691 static int
2692 check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2693 {
2694 attr_fnspec fnspec = gimple_call_fnspec (call);
2695 if (fnspec.known_p ())
2696 {
2697 if (clobber
2698 ? !fnspec.global_memory_written_p ()
2699 : !fnspec.global_memory_read_p ())
2700 {
2701 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
2702 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2703 && (!fnspec.arg_specified_p (i)
2704 || (clobber ? fnspec.arg_maybe_written_p (i)
2705 : fnspec.arg_maybe_read_p (i))))
2706 {
2707 ao_ref dref;
2708 tree size = NULL_TREE;
2709 unsigned int size_arg;
2710
2711 if (!fnspec.arg_specified_p (i))
2712 ;
2713 else if (fnspec.arg_max_access_size_given_by_arg_p
2714 (i, &size_arg))
2715 size = gimple_call_arg (call, size_arg);
2716 else if (fnspec.arg_access_size_given_by_type_p (i))
2717 {
2718 tree callee = gimple_call_fndecl (call);
2719 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2720
2721 for (unsigned int p = 0; p < i; p++)
2722 t = TREE_CHAIN (t);
2723 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2724 }
2725 poly_int64 size_hwi;
2726 if (size
2727 && poly_int_tree_p (size, &size_hwi)
2728 && coeffs_in_range_p (size_hwi, 0,
2729 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
2730 {
2731 size_hwi = size_hwi * BITS_PER_UNIT;
2732 ao_ref_init_from_ptr_and_range (&dref,
2733 gimple_call_arg (call, i),
2734 true, 0, -1, size_hwi);
2735 }
2736 else
2737 ao_ref_init_from_ptr_and_range (&dref,
2738 gimple_call_arg (call, i),
2739 false, 0, -1, -1);
2740 if (refs_may_alias_p_1 (&dref, ref, false))
2741 return 1;
2742 }
2743 if (clobber
2744 && fnspec.errno_maybe_written_p ()
2745 && flag_errno_math
2746 && targetm.ref_may_alias_errno (ref))
2747 return 1;
2748 return 0;
2749 }
2750 }
2751
2752 /* FIXME: we should handle barriers more consistently, but for now leave the
2753 check here. */
2754 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2755 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2756 {
2757 /* __sync_* builtins and some OpenMP builtins act as threading
2758 barriers. */
2759 #undef DEF_SYNC_BUILTIN
2760 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2761 #include "sync-builtins.def"
2762 #undef DEF_SYNC_BUILTIN
2763 case BUILT_IN_GOMP_ATOMIC_START:
2764 case BUILT_IN_GOMP_ATOMIC_END:
2765 case BUILT_IN_GOMP_BARRIER:
2766 case BUILT_IN_GOMP_BARRIER_CANCEL:
2767 case BUILT_IN_GOMP_TASKWAIT:
2768 case BUILT_IN_GOMP_TASKGROUP_END:
2769 case BUILT_IN_GOMP_CRITICAL_START:
2770 case BUILT_IN_GOMP_CRITICAL_END:
2771 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2772 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2773 case BUILT_IN_GOMP_LOOP_END:
2774 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2775 case BUILT_IN_GOMP_ORDERED_START:
2776 case BUILT_IN_GOMP_ORDERED_END:
2777 case BUILT_IN_GOMP_SECTIONS_END:
2778 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2779 case BUILT_IN_GOMP_SINGLE_COPY_START:
2780 case BUILT_IN_GOMP_SINGLE_COPY_END:
2781 return 1;
2782
2783 default:
2784 return -1;
2785 }
2786 return -1;
2787 }
2788
2789 /* If the call CALL may use the memory reference REF return true,
2790 otherwise return false. */
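/* For example (a hedged sketch): a const call does not read user-visible
   memory and only its by-value arguments need inspecting; for other calls
   a reference based on a non-aliased, non-global local cannot be used by
   the callee, and the call's points-to use set handles the remaining decl
   and pointer bases.  */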
2791
2792 static bool
2793 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2794 {
2795 tree base, callee;
2796 unsigned i;
2797 int flags = gimple_call_flags (call);
2798
2799 if (flags & (ECF_CONST|ECF_NOVOPS))
2800 goto process_args;
2801
2802 /* A call that is not without side-effects might involve volatile
2803 accesses and thus conflicts with all other volatile accesses. */
2804 if (ref->volatile_p)
2805 return true;
2806
2807 callee = gimple_call_fndecl (call);
2808
2809 if (callee != NULL_TREE)
2810 {
2811 struct cgraph_node *node = cgraph_node::get (callee);
2812 /* We cannot safely optimize based on the summary of the callee if it does
2813 not always bind to the current def: it is possible that a memory load
2814 was optimized out earlier and the interposed variant may not be
2815 optimized this way. */
2816 if (node && node->binds_to_current_def_p ())
2817 {
2818 modref_summary *summary = get_modref_function_summary (node);
2819 if (summary && !summary->calls_interposable)
2820 {
2821 if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2822 {
2823 alias_stats.modref_use_no_alias++;
2824 if (dump_file && (dump_flags & TDF_DETAILS))
2825 {
2826 fprintf (dump_file,
2827 "ipa-modref: call stmt ");
2828 print_gimple_stmt (dump_file, call, 0);
2829 fprintf (dump_file,
2830 "ipa-modref: call to %s does not use ",
2831 node->dump_name ());
2832 if (!ref->ref && ref->base)
2833 {
2834 fprintf (dump_file, "base: ");
2835 print_generic_expr (dump_file, ref->base);
2836 }
2837 else if (ref->ref)
2838 {
2839 fprintf (dump_file, "ref: ");
2840 print_generic_expr (dump_file, ref->ref);
2841 }
2842 fprintf (dump_file, " alias sets: %i->%i\n",
2843 ao_ref_base_alias_set (ref),
2844 ao_ref_alias_set (ref));
2845 }
2846 goto process_args;
2847 }
2848 alias_stats.modref_use_may_alias++;
2849 }
2850 }
2851 }
2852
2853 base = ao_ref_base (ref);
2854 if (!base)
2855 return true;
2856
2857 /* If the reference is based on a decl that is not aliased the call
2858 cannot possibly use it. */
2859 if (DECL_P (base)
2860 && !may_be_aliased (base)
2861 /* But local statics can be used through recursion. */
2862 && !is_global_var (base))
2863 goto process_args;
2864
2865 if (int res = check_fnspec (call, ref, false))
2866 {
2867 if (res == 1)
2868 return true;
2869 }
2870 else
2871 goto process_args;
2872
2873 /* Check if base is a global static variable that is not read
2874 by the function. */
2875 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2876 {
2877 struct cgraph_node *node = cgraph_node::get (callee);
2878 bitmap read;
2879 int id;
2880
2881 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2882 node yet. We should enforce that there are nodes for all decls in the
2883 IL and remove this check instead. */
2884 if (node
2885 && (id = ipa_reference_var_uid (base)) != -1
2886 && (read = ipa_reference_get_read_global (node))
2887 && !bitmap_bit_p (read, id))
2888 goto process_args;
2889 }
2890
2891 /* Check if the base variable is call-used. */
2892 if (DECL_P (base))
2893 {
2894 if (pt_solution_includes (gimple_call_use_set (call), base))
2895 return true;
2896 }
2897 else if ((TREE_CODE (base) == MEM_REF
2898 || TREE_CODE (base) == TARGET_MEM_REF)
2899 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2900 {
2901 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2902 if (!pi)
2903 return true;
2904
2905 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2906 return true;
2907 }
2908 else
2909 return true;
2910
2911 /* Inspect call arguments for passed-by-value aliases. */
2912 process_args:
2913 for (i = 0; i < gimple_call_num_args (call); ++i)
2914 {
2915 tree op = gimple_call_arg (call, i);
2916 int flags = gimple_call_arg_flags (call, i);
2917
2918 if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
2919 continue;
2920
2921 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2922 op = TREE_OPERAND (op, 0);
2923
2924 if (TREE_CODE (op) != SSA_NAME
2925 && !is_gimple_min_invariant (op))
2926 {
2927 ao_ref r;
2928 ao_ref_init (&r, op);
2929 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2930 return true;
2931 }
2932 }
2933
2934 return false;
2935 }
2936
2937 static bool
2938 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2939 {
2940 bool res;
2941 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2942 if (res)
2943 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2944 else
2945 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2946 return res;
2947 }
2948
2949
2950 /* If the statement STMT may use the memory reference REF return
2951 true, otherwise return false. */
2952
2953 bool
2954 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2955 {
2956 if (is_gimple_assign (stmt))
2957 {
2958 tree rhs;
2959
2960 /* All memory assign statements are single. */
2961 if (!gimple_assign_single_p (stmt))
2962 return false;
2963
2964 rhs = gimple_assign_rhs1 (stmt);
2965 if (is_gimple_reg (rhs)
2966 || is_gimple_min_invariant (rhs)
2967 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2968 return false;
2969
2970 return refs_may_alias_p (rhs, ref, tbaa_p);
2971 }
2972 else if (is_gimple_call (stmt))
2973 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2974 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2975 {
2976 tree retval = gimple_return_retval (return_stmt);
2977 if (retval
2978 && TREE_CODE (retval) != SSA_NAME
2979 && !is_gimple_min_invariant (retval)
2980 && refs_may_alias_p (retval, ref, tbaa_p))
2981 return true;
2982 /* If ref escapes the function then the return acts as a use. */
2983 tree base = ao_ref_base (ref);
2984 if (!base)
2985 ;
2986 else if (DECL_P (base))
2987 return is_global_var (base);
2988 else if (TREE_CODE (base) == MEM_REF
2989 || TREE_CODE (base) == TARGET_MEM_REF)
2990 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0), false);
2991 return false;
2992 }
2993
2994 return true;
2995 }
2996
2997 bool
2998 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2999 {
3000 ao_ref r;
3001 ao_ref_init (&r, ref);
3002 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
3003 }
3004
3005 /* If the call in statement CALL may clobber the memory reference REF
3006 return true, otherwise return false. */
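/* For example (a hedged sketch): a pure or const call never clobbers
   anything; for other calls, a reference based on a non-aliased local
   (unless it is a global static that recursion or a threading barrier
   could modify) cannot be clobbered, and ipa-reference and points-to
   information handle the remaining decl and pointer bases.  */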
3007
3008 bool
3009 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
3010 {
3011 tree base;
3012 tree callee;
3013
3014 /* If the call is pure or const it cannot clobber anything. */
3015 if (gimple_call_flags (call)
3016 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
3017 return false;
3018 if (gimple_call_internal_p (call))
3019 switch (gimple_call_internal_fn (call))
3020 {
3021 /* Treat these internal calls like ECF_PURE for aliasing,
3022 they don't write to any memory the program should care about.
3023 They have important other side-effects, and read memory,
3024 so can't be ECF_NOVOPS. */
3025 case IFN_UBSAN_NULL:
3026 case IFN_UBSAN_BOUNDS:
3027 case IFN_UBSAN_VPTR:
3028 case IFN_UBSAN_OBJECT_SIZE:
3029 case IFN_UBSAN_PTR:
3030 case IFN_ASAN_CHECK:
3031 return false;
3032 default:
3033 break;
3034 }
3035
3036 callee = gimple_call_fndecl (call);
3037
3038 if (callee != NULL_TREE && !ref->volatile_p)
3039 {
3040 struct cgraph_node *node = cgraph_node::get (callee);
3041 if (node)
3042 {
3043 modref_summary *summary = get_modref_function_summary (node);
3044 if (summary)
3045 {
3046 if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
3047 && (!summary->writes_errno
3048 || !targetm.ref_may_alias_errno (ref)))
3049 {
3050 alias_stats.modref_clobber_no_alias++;
3051 if (dump_file && (dump_flags & TDF_DETAILS))
3052 {
3053 fprintf (dump_file,
3054 "ipa-modref: call stmt ");
3055 print_gimple_stmt (dump_file, call, 0);
3056 fprintf (dump_file,
3057 "ipa-modref: call to %s does not clobber ",
3058 node->dump_name ());
3059 if (!ref->ref && ref->base)
3060 {
3061 fprintf (dump_file, "base: ");
3062 print_generic_expr (dump_file, ref->base);
3063 }
3064 else if (ref->ref)
3065 {
3066 fprintf (dump_file, "ref: ");
3067 print_generic_expr (dump_file, ref->ref);
3068 }
3069 fprintf (dump_file, " alias sets: %i->%i\n",
3070 ao_ref_base_alias_set (ref),
3071 ao_ref_alias_set (ref));
3072 }
3073 return false;
3074 }
3075 alias_stats.modref_clobber_may_alias++;
3076 }
3077 }
3078 }
3079
3080 base = ao_ref_base (ref);
3081 if (!base)
3082 return true;
3083
3084 if (TREE_CODE (base) == SSA_NAME
3085 || CONSTANT_CLASS_P (base))
3086 return false;
3087
3088 /* A call that is not without side-effects might involve volatile
3089 accesses and thus conflicts with all other volatile accesses. */
3090 if (ref->volatile_p)
3091 return true;
3092
3093 /* If the reference is based on a decl that is not aliased the call
3094 cannot possibly clobber it. */
3095 if (DECL_P (base)
3096 && !may_be_aliased (base)
3097 /* But local non-readonly statics can be modified through recursion
3098 or the call may implement a threading barrier which we must
3099 treat as may-def. */
3100 && (TREE_READONLY (base)
3101 || !is_global_var (base)))
3102 return false;
3103
3104 /* If the reference is based on a pointer that points to memory
3105 that may not be written to then the call cannot possibly clobber it. */
3106 if ((TREE_CODE (base) == MEM_REF
3107 || TREE_CODE (base) == TARGET_MEM_REF)
3108 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3109 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3110 return false;
3111
3112 if (int res = check_fnspec (call, ref, true))
3113 {
3114 if (res == 1)
3115 return true;
3116 }
3117 else
3118 return false;
3119
3120 /* Check if base is a global static variable that is not written
3121 by the function. */
3122 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3123 {
3124 struct cgraph_node *node = cgraph_node::get (callee);
3125 bitmap written;
3126 int id;
3127
3128 if (node
3129 && (id = ipa_reference_var_uid (base)) != -1
3130 && (written = ipa_reference_get_written_global (node))
3131 && !bitmap_bit_p (written, id))
3132 return false;
3133 }
3134
3135 /* Check if the base variable is call-clobbered. */
3136 if (DECL_P (base))
3137 return pt_solution_includes (gimple_call_clobber_set (call), base);
3138 else if ((TREE_CODE (base) == MEM_REF
3139 || TREE_CODE (base) == TARGET_MEM_REF)
3140 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3141 {
3142 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3143 if (!pi)
3144 return true;
3145
3146 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3147 }
3148
3149 return true;
3150 }
3151
3152 /* If the call in statement CALL may clobber the memory reference REF
3153 return true, otherwise return false. */
3154
3155 bool
3156 call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3157 {
3158 bool res;
3159 ao_ref r;
3160 ao_ref_init (&r, ref);
3161 res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3162 if (res)
3163 ++alias_stats.call_may_clobber_ref_p_may_alias;
3164 else
3165 ++alias_stats.call_may_clobber_ref_p_no_alias;
3166 return res;
3167 }
3168
3169
3170 /* If the statement STMT may clobber the memory reference REF return true,
3171 otherwise return false. */
3172
3173 bool
3174 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3175 {
3176 if (is_gimple_call (stmt))
3177 {
3178 tree lhs = gimple_call_lhs (stmt);
3179 if (lhs
3180 && TREE_CODE (lhs) != SSA_NAME)
3181 {
3182 ao_ref r;
3183 ao_ref_init (&r, lhs);
3184 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3185 return true;
3186 }
3187
3188 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3189 }
3190 else if (gimple_assign_single_p (stmt))
3191 {
3192 tree lhs = gimple_assign_lhs (stmt);
3193 if (TREE_CODE (lhs) != SSA_NAME)
3194 {
3195 ao_ref r;
3196 ao_ref_init (&r, lhs);
3197 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3198 }
3199 }
3200 else if (gimple_code (stmt) == GIMPLE_ASM)
3201 return true;
3202
3203 return false;
3204 }
3205
3206 bool
3207 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3208 {
3209 ao_ref r;
3210 ao_ref_init (&r, ref);
3211 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3212 }
3213
3214 /* Return true if store1 and store2 described by corresponding tuples
3215 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3216 address. */
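/* For example (a hedged sketch): a store "obj = ..." (a decl base) and a
   store "*p_1 = ..." (a MEM_REF base with zero offset) are to the same
   address and size when the points-to set of p_1 is the singleton { obj }
   and the stored size equals DECL_SIZE of obj, so p_1 must point at the
   start of obj.  */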
3217
3218 static bool
3219 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3220 poly_int64 max_size1,
3221 tree base2, poly_int64 offset2, poly_int64 size2,
3222 poly_int64 max_size2)
3223 {
3224 /* Offsets need to be 0. */
3225 if (maybe_ne (offset1, 0)
3226 || maybe_ne (offset2, 0))
3227 return false;
3228
3229 bool base1_obj_p = SSA_VAR_P (base1);
3230 bool base2_obj_p = SSA_VAR_P (base2);
3231
3232 /* We need one object. */
3233 if (base1_obj_p == base2_obj_p)
3234 return false;
3235 tree obj = base1_obj_p ? base1 : base2;
3236
3237 /* And we need one MEM_REF. */
3238 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3239 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3240 if (base1_memref_p == base2_memref_p)
3241 return false;
3242 tree memref = base1_memref_p ? base1 : base2;
3243
3244 /* Sizes need to be valid. */
3245 if (!known_size_p (max_size1)
3246 || !known_size_p (max_size2)
3247 || !known_size_p (size1)
3248 || !known_size_p (size2))
3249 return false;
3250
3251 /* Max_size needs to match size. */
3252 if (maybe_ne (max_size1, size1)
3253 || maybe_ne (max_size2, size2))
3254 return false;
3255
3256 /* Sizes need to match. */
3257 if (maybe_ne (size1, size2))
3258 return false;
3259
3260
3261 /* Check that memref is a store to a pointer with singleton points-to info. */
3262 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3263 return false;
3264 tree ptr = TREE_OPERAND (memref, 0);
3265 if (TREE_CODE (ptr) != SSA_NAME)
3266 return false;
3267 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3268 unsigned int pt_uid;
3269 if (pi == NULL
3270 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3271 return false;
3272
3273 /* Be conservative with non-call exceptions when the address might
3274 be NULL. */
3275 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3276 return false;
3277
3278 /* Check that ptr points relative to obj. */
3279 unsigned int obj_uid = DECL_PT_UID (obj);
3280 if (obj_uid != pt_uid)
3281 return false;
3282
3283 /* Check that the object size is the same as the store size. That ensures
3284 that ptr points to the start of obj. */
3285 return (DECL_SIZE (obj)
3286 && poly_int_tree_p (DECL_SIZE (obj))
3287 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3288 }
3289
3290 /* Return true if REF is killed by a store described by
3291 BASE, OFFSET, SIZE and MAX_SIZE. */
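/* For instance (a hedged example): a store to a range [o, o + s) of a
   common base kills a read of any subrange of that range, provided the
   store's size and max_size agree so the access is exactly constrained.  */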
3292
3293 static bool
3294 store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3295 poly_int64 max_size, ao_ref *ref)
3296 {
3297 poly_int64 ref_offset = ref->offset;
3298 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3299 so base == ref->base does not always hold. */
3300 if (base != ref->base)
3301 {
3302 /* Try using points-to info. */
3303 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3304 ref->offset, ref->size, ref->max_size))
3305 return true;
3306
3307 /* If both base and ref->base are MEM_REFs, only compare the
3308 first operand, and if the second operand isn't equal constant,
3309 try to add the offsets into offset and ref_offset. */
3310 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3311 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3312 {
3313 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3314 TREE_OPERAND (ref->base, 1)))
3315 {
3316 poly_offset_int off1 = mem_ref_offset (base);
3317 off1 <<= LOG2_BITS_PER_UNIT;
3318 off1 += offset;
3319 poly_offset_int off2 = mem_ref_offset (ref->base);
3320 off2 <<= LOG2_BITS_PER_UNIT;
3321 off2 += ref_offset;
3322 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3323 size = -1;
3324 }
3325 }
3326 else
3327 size = -1;
3328 }
3329 /* For a must-alias check we need to be able to constrain
3330 the access properly. */
3331 return (known_eq (size, max_size)
3332 && known_subrange_p (ref_offset, ref->max_size, offset, size));
3333 }
3334
3335 /* If STMT kills the memory reference REF return true, otherwise
3336 return false. */
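/* For example, given two successive stores through the same pointer

     *p_1 = 1;
     *p_1 = 2;

   the second statement kills the reference written by the first one;
   this is the kind of query dead store elimination performs.  (Purely
   illustrative GIMPLE, not tied to a specific dump.)  */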
3337
3338 bool
3339 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3340 {
3341 if (!ao_ref_base (ref))
3342 return false;
3343
3344 if (gimple_has_lhs (stmt)
3345 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3346 /* The assignment is not necessarily carried out if it can throw
3347 and we can catch it in the current function where we could inspect
3348 the previous value.
3349 ??? We only need to care about the RHS throwing. For aggregate
3350 assignments or similar calls and non-call exceptions the LHS
3351 might throw as well. */
3352 && !stmt_can_throw_internal (cfun, stmt))
3353 {
3354 tree lhs = gimple_get_lhs (stmt);
3355 /* If LHS is literally a base of the access we are done. */
3356 if (ref->ref)
3357 {
3358 tree base = ref->ref;
3359 tree innermost_dropped_array_ref = NULL_TREE;
3360 if (handled_component_p (base))
3361 {
3362 tree saved_lhs0 = NULL_TREE;
3363 if (handled_component_p (lhs))
3364 {
3365 saved_lhs0 = TREE_OPERAND (lhs, 0);
3366 TREE_OPERAND (lhs, 0) = integer_zero_node;
3367 }
3368 do
3369 {
3370 /* Just compare the outermost handled component, if
3371 they are equal we have found a possible common
3372 base. */
3373 tree saved_base0 = TREE_OPERAND (base, 0);
3374 TREE_OPERAND (base, 0) = integer_zero_node;
3375 bool res = operand_equal_p (lhs, base, 0);
3376 TREE_OPERAND (base, 0) = saved_base0;
3377 if (res)
3378 break;
3379 /* Remember if we drop an array-ref that we need to
3380 double-check not being at struct end. */
3381 if (TREE_CODE (base) == ARRAY_REF
3382 || TREE_CODE (base) == ARRAY_RANGE_REF)
3383 innermost_dropped_array_ref = base;
3384 /* Otherwise drop handled components of the access. */
3385 base = saved_base0;
3386 }
3387 while (handled_component_p (base));
3388 if (saved_lhs0)
3389 TREE_OPERAND (lhs, 0) = saved_lhs0;
3390 }
3391 /* Finally check if the lhs has the same address and size as the
3392 base candidate of the access. Watch out if we have dropped
3393 an array-ref that was at struct end, this means ref->ref may
3394 be outside of the TYPE_SIZE of its base. */
3395 if ((! innermost_dropped_array_ref
3396 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3397 && (lhs == base
3398 || (((TYPE_SIZE (TREE_TYPE (lhs))
3399 == TYPE_SIZE (TREE_TYPE (base)))
3400 || (TYPE_SIZE (TREE_TYPE (lhs))
3401 && TYPE_SIZE (TREE_TYPE (base))
3402 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3403 TYPE_SIZE (TREE_TYPE (base)),
3404 0)))
3405 && operand_equal_p (lhs, base,
3406 OEP_ADDRESS_OF
3407 | OEP_MATCH_SIDE_EFFECTS))))
3408 {
3409 ++alias_stats.stmt_kills_ref_p_yes;
3410 return true;
3411 }
3412 }
3413
3414 /* Now look for non-literal equal bases with the restriction of
3415 handling constant offset and size. */
3416 /* For a must-alias check we need to be able to constrain
3417 the access properly. */
3418 if (!ref->max_size_known_p ())
3419 {
3420 ++alias_stats.stmt_kills_ref_p_no;
3421 return false;
3422 }
3423 poly_int64 size, offset, max_size;
3424 bool reverse;
3425 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3426 &reverse);
3427 if (store_kills_ref_p (base, offset, size, max_size, ref))
3428 {
3429 ++alias_stats.stmt_kills_ref_p_yes;
3430 return true;
3431 }
3432 }
3433
3434 if (is_gimple_call (stmt))
3435 {
3436 tree callee = gimple_call_fndecl (stmt);
3437 struct cgraph_node *node;
3438 modref_summary *summary;
3439
3440 /* Try to disambiguate using the modref summary. Modref records a vector
3441 of stores with known offsets relative to function parameters that must
3442 happen on every execution of the function. Find whether we have a matching
3443 store and verify that the function cannot use the value. */
3444 if (callee != NULL_TREE
3445 && (node = cgraph_node::get (callee)) != NULL
3446 && node->binds_to_current_def_p ()
3447 && (summary = get_modref_function_summary (node)) != NULL
3448 && summary->kills.length ()
3449 && (!cfun->can_throw_non_call_exceptions
3450 || !stmt_can_throw_internal (cfun, stmt)))
3451 {
3452 for (auto kill : summary->kills)
3453 {
3454 ao_ref dref;
3455
3456 /* We can only do useful compares if we know the access range
3457 precisely. */
3458 if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
3459 continue;
3460 if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3461 dref.size, dref.max_size, ref))
3462 {
3463 /* For the store to be killed it must not be used
3464 earlier. */
3465 if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
3466 true)
3467 || !dbg_cnt (ipa_mod_ref))
3468 break;
3469 if (dump_file && (dump_flags & TDF_DETAILS))
3470 {
3471 fprintf (dump_file,
3472 "ipa-modref: call stmt ");
3473 print_gimple_stmt (dump_file, stmt, 0);
3474 fprintf (dump_file,
3475 "ipa-modref: call to %s kills ",
3476 node->dump_name ());
3477 print_generic_expr (dump_file, ref->base);
3478 }
3479 ++alias_stats.modref_kill_yes;
3480 return true;
3481 }
3482 }
3483 ++alias_stats.modref_kill_no;
3484 }
3485 if (callee != NULL_TREE
3486 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3487 switch (DECL_FUNCTION_CODE (callee))
3488 {
3489 case BUILT_IN_FREE:
3490 {
3491 tree ptr = gimple_call_arg (stmt, 0);
3492 tree base = ao_ref_base (ref);
3493 if (base && TREE_CODE (base) == MEM_REF
3494 && TREE_OPERAND (base, 0) == ptr)
3495 {
3496 ++alias_stats.stmt_kills_ref_p_yes;
3497 return true;
3498 }
3499 break;
3500 }
3501
3502 case BUILT_IN_MEMCPY:
3503 case BUILT_IN_MEMPCPY:
3504 case BUILT_IN_MEMMOVE:
3505 case BUILT_IN_MEMSET:
3506 case BUILT_IN_MEMCPY_CHK:
3507 case BUILT_IN_MEMPCPY_CHK:
3508 case BUILT_IN_MEMMOVE_CHK:
3509 case BUILT_IN_MEMSET_CHK:
3510 case BUILT_IN_STRNCPY:
3511 case BUILT_IN_STPNCPY:
3512 case BUILT_IN_CALLOC:
3513 {
3514 /* For a must-alias check we need to be able to constrain
3515 the access properly. */
3516 if (!ref->max_size_known_p ())
3517 {
3518 ++alias_stats.stmt_kills_ref_p_no;
3519 return false;
3520 }
3521 tree dest;
3522 tree len;
3523
3524 /* In execution order a calloc call will never kill
3525 anything. However, DSE will (ab)use this interface
3526 to ask if a calloc call writes the same memory locations
3527 as a later assignment, memset, etc. So handle calloc
3528 in the expected way. */
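/* A hypothetical illustration of that (ab)use: for

     p_1 = calloc (1, 16);
     ...
     memset (p_1, 0, 16);

   DSE may ask whether the calloc, viewed as a 16-byte store of zeros
   at *p_1, writes the bytes the later memset writes.  The SSA names
   are made up; only the shape of the query matters here.  */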
3529 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3530 {
3531 tree arg0 = gimple_call_arg (stmt, 0);
3532 tree arg1 = gimple_call_arg (stmt, 1);
3533 if (TREE_CODE (arg0) != INTEGER_CST
3534 || TREE_CODE (arg1) != INTEGER_CST)
3535 {
3536 ++alias_stats.stmt_kills_ref_p_no;
3537 return false;
3538 }
3539
3540 dest = gimple_call_lhs (stmt);
3541 if (!dest)
3542 {
3543 ++alias_stats.stmt_kills_ref_p_no;
3544 return false;
3545 }
3546 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3547 }
3548 else
3549 {
3550 dest = gimple_call_arg (stmt, 0);
3551 len = gimple_call_arg (stmt, 2);
3552 }
3553 if (!poly_int_tree_p (len))
3554 return false;
3555 ao_ref dref;
3556 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3557 if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3558 dref.size, dref.max_size, ref))
3559 {
3560 ++alias_stats.stmt_kills_ref_p_yes;
3561 return true;
3562 }
3563 break;
3564 }
3565
3566 case BUILT_IN_VA_END:
3567 {
3568 tree ptr = gimple_call_arg (stmt, 0);
3569 if (TREE_CODE (ptr) == ADDR_EXPR)
3570 {
3571 tree base = ao_ref_base (ref);
3572 if (TREE_OPERAND (ptr, 0) == base)
3573 {
3574 ++alias_stats.stmt_kills_ref_p_yes;
3575 return true;
3576 }
3577 }
3578 break;
3579 }
3580
3581 default:;
3582 }
3583 }
3584 ++alias_stats.stmt_kills_ref_p_no;
3585 return false;
3586 }
3587
3588 bool
3589 stmt_kills_ref_p (gimple *stmt, tree ref)
3590 {
3591 ao_ref r;
3592 ao_ref_init (&r, ref);
3593 return stmt_kills_ref_p (stmt, &r);
3594 }
3595
3596
3597 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3598 TARGET or a statement clobbering the memory reference REF, in which
3599 case false is returned. The walk starts with VUSE, one argument of PHI. */
3600
3601 static bool
3602 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3603 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3604 bitmap *visited, bool abort_on_visited,
3605 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3606 translate_flags disambiguate_only,
3607 void *data)
3608 {
3609 basic_block bb = gimple_bb (phi);
3610
3611 if (!*visited)
3612 *visited = BITMAP_ALLOC (NULL);
3613
3614 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3615
3616 /* Walk until we hit the target. */
3617 while (vuse != target)
3618 {
3619 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3620 /* If we are searching for the target VUSE by walking up to
3621 TARGET_BB dominating the original PHI we are finished once
3622 we reach a default def or a definition in a block dominating
3623 that block. Update TARGET and return. */
3624 if (!target
3625 && (gimple_nop_p (def_stmt)
3626 || dominated_by_p (CDI_DOMINATORS,
3627 target_bb, gimple_bb (def_stmt))))
3628 {
3629 target = vuse;
3630 return true;
3631 }
3632
3633 /* Recurse for PHI nodes. */
3634 if (gimple_code (def_stmt) == GIMPLE_PHI)
3635 {
3636 /* An already visited PHI node ends the walk successfully. */
3637 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3638 return !abort_on_visited;
3639 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3640 visited, abort_on_visited,
3641 translate, data, disambiguate_only);
3642 if (!vuse)
3643 return false;
3644 continue;
3645 }
3646 else if (gimple_nop_p (def_stmt))
3647 return false;
3648 else
3649 {
3650 /* A clobbering statement or the end of the IL ends the walk unsuccessfully. */
3651 if ((int)limit <= 0)
3652 return false;
3653 --limit;
3654 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3655 {
3656 translate_flags tf = disambiguate_only;
3657 if (translate
3658 && (*translate) (ref, vuse, data, &tf) == NULL)
3659 ;
3660 else
3661 return false;
3662 }
3663 }
3664 /* If we reach a new basic-block see if we already skipped it
3665 in a previous walk that ended successfully. */
3666 if (gimple_bb (def_stmt) != bb)
3667 {
3668 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3669 return !abort_on_visited;
3670 bb = gimple_bb (def_stmt);
3671 }
3672 vuse = gimple_vuse (def_stmt);
3673 }
3674 return true;
3675 }
3676
3677
3678 /* Starting from a PHI node for the virtual operand of the memory reference
3679 REF find a continuation virtual operand that allows continuing the walk of
3680 statements dominating PHI, skipping only statements that cannot possibly
3681 clobber REF. Decrements LIMIT for each alias disambiguation done
3682 and aborts the walk, returning NULL_TREE if it reaches zero.
3683 Returns NULL_TREE if no suitable virtual operand can be found. */
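/* As an illustrative sketch: for a diamond-shaped CFG where a PHI merges
   .MEM_2 from the then-arm and .MEM_3 from the else-arm and neither arm
   contains a statement that may clobber REF, the walk can continue from
   the virtual operand that was live before the condition, i.e. the one
   whose definition dominates the PHI.  */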
3684
3685 tree
3686 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3687 unsigned int &limit, bitmap *visited,
3688 bool abort_on_visited,
3689 void *(*translate)(ao_ref *, tree, void *,
3690 translate_flags *),
3691 void *data,
3692 translate_flags disambiguate_only)
3693 {
3694 unsigned nargs = gimple_phi_num_args (phi);
3695
3696 /* We can simply look through a single-argument PHI. */
3697 if (nargs == 1)
3698 return PHI_ARG_DEF (phi, 0);
3699
3700 /* For two or more arguments try to pairwise skip non-aliasing code
3701 until we hit the phi argument definition that dominates the other one. */
3702 basic_block phi_bb = gimple_bb (phi);
3703 tree arg0, arg1;
3704 unsigned i;
3705
3706 /* Find a candidate for the virtual operand whose definition
3707 dominates those of all others. */
3708 /* First look if any of the args themselves satisfy this. */
3709 for (i = 0; i < nargs; ++i)
3710 {
3711 arg0 = PHI_ARG_DEF (phi, i);
3712 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3713 break;
3714 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3715 if (def_bb != phi_bb
3716 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3717 break;
3718 arg0 = NULL_TREE;
3719 }
3720 /* If not, look if we can reach such candidate by walking defs
3721 until we hit the immediate dominator. maybe_skip_until will
3722 do that for us. */
3723 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3724
3725 /* Then check against the (to be) found candidate. */
3726 for (i = 0; i < nargs; ++i)
3727 {
3728 arg1 = PHI_ARG_DEF (phi, i);
3729 if (arg1 == arg0)
3730 ;
3731 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3732 limit, visited,
3733 abort_on_visited,
3734 translate,
3735 /* Do not valueize when walking over
3736 backedges. */
3737 dominated_by_p
3738 (CDI_DOMINATORS,
3739 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3740 phi_bb)
3741 ? TR_DISAMBIGUATE
3742 : disambiguate_only, data))
3743 return NULL_TREE;
3744 }
3745
3746 return arg0;
3747 }
3748
3749 /* Based on the memory reference REF and its virtual use VUSE call
3750 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3751 itself. That is, for each virtual use whose defining statement
3752 does not clobber REF.
3753
3754 WALKER is called with REF, the current virtual use and DATA. If
3755 WALKER returns non-NULL the walk stops and its result is returned.
3756 At the end of a non-successful walk NULL is returned.
3757
3758 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3759 use whose definition is a statement that may clobber REF, and DATA.
3760 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3761 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3762 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3763 to adjust REF and *DATA to make that valid.
3764
3765 VALUEIZE if non-NULL is called with the next VUSE that is considered
3766 and its return value is substituted for it. This can be used to
3767 implement optimistic value-numbering for example. Note that the
3768 VUSE argument is assumed to be valueized already.
3769
3770 LIMIT specifies the number of alias queries we are allowed to do,
3771 the walk stops when it reaches zero and NULL is returned. LIMIT
3772 is decremented by the number of alias queries (plus adjustments
3773 done by the callbacks) upon return.
3774
3775 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
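/* A minimal, hypothetical caller (the callback name, the limit value and
   the variables R and VUSE are made up for illustration; they are not
   part of this interface):

     static void *
     find_same_store (ao_ref *ref, tree vuse, void *)
     {
       gimple *def = SSA_NAME_DEF_STMT (vuse);
       if (gimple_assign_single_p (def)
	   && ref->ref
	   && operand_equal_p (gimple_assign_lhs (def), ref->ref, 0))
	 return def;		// Found a dominating store; stop the walk.
       return NULL;		// Keep walking.
     }

     unsigned limit = 100;
     void *res = walk_non_aliased_vuses (&r, vuse, true, find_same_store,
					 NULL, NULL, limit, NULL);

   walks from VUSE towards function entry, skipping statements that cannot
   clobber R, and stops at the first dominating store to the very same
   lvalue -- roughly the pattern value-numbering uses.  */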
3776
3777 void *
3778 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3779 void *(*walker)(ao_ref *, tree, void *),
3780 void *(*translate)(ao_ref *, tree, void *,
3781 translate_flags *),
3782 tree (*valueize)(tree),
3783 unsigned &limit, void *data)
3784 {
3785 bitmap visited = NULL;
3786 void *res;
3787 bool translated = false;
3788
3789 timevar_push (TV_ALIAS_STMT_WALK);
3790
3791 do
3792 {
3793 gimple *def_stmt;
3794
3795 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3796 res = (*walker) (ref, vuse, data);
3797 /* Abort walk. */
3798 if (res == (void *)-1)
3799 {
3800 res = NULL;
3801 break;
3802 }
3803 /* Lookup succeeded. */
3804 else if (res != NULL)
3805 break;
3806
3807 if (valueize)
3808 {
3809 vuse = valueize (vuse);
3810 if (!vuse)
3811 {
3812 res = NULL;
3813 break;
3814 }
3815 }
3816 def_stmt = SSA_NAME_DEF_STMT (vuse);
3817 if (gimple_nop_p (def_stmt))
3818 break;
3819 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3820 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3821 &visited, translated, translate, data);
3822 else
3823 {
3824 if ((int)limit <= 0)
3825 {
3826 res = NULL;
3827 break;
3828 }
3829 --limit;
3830 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3831 {
3832 if (!translate)
3833 break;
3834 translate_flags disambiguate_only = TR_TRANSLATE;
3835 res = (*translate) (ref, vuse, data, &disambiguate_only);
3836 /* Failed lookup and translation. */
3837 if (res == (void *)-1)
3838 {
3839 res = NULL;
3840 break;
3841 }
3842 /* Lookup succeeded. */
3843 else if (res != NULL)
3844 break;
3845 /* Translation succeeded, continue walking. */
3846 translated = translated || disambiguate_only == TR_TRANSLATE;
3847 }
3848 vuse = gimple_vuse (def_stmt);
3849 }
3850 }
3851 while (vuse);
3852
3853 if (visited)
3854 BITMAP_FREE (visited);
3855
3856 timevar_pop (TV_ALIAS_STMT_WALK);
3857
3858 return res;
3859 }
3860
3861
3862 /* Based on the memory reference REF call WALKER for each vdef whose
3863 defining statement may clobber REF, starting with VDEF. If REF
3864 is NULL_TREE, each defining statement is visited.
3865
3866 WALKER is called with REF, the current vdef and DATA. If WALKER
3867 returns true the walk is stopped, otherwise it continues.
3868
3869 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3870 The pointer may be NULL and then we do not track this information.
3871
3872 At PHI nodes walk_aliased_vdefs forks into one walk for each
3873 PHI argument (but only one walk continues at merge points), the
3874 return value is true if any of the walks was successful.
3875
3876 The function returns the number of statements walked or -1 if
3877 LIMIT stmts were walked and the walk was aborted at this point.
3878 If LIMIT is zero the walk is not aborted. */
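/* A small, hypothetical use (the callback, the counter and the variables
   R and STMT are illustrative only):

     static bool
     count_clobber (ao_ref *, tree, void *data)
     {
       ++*(unsigned *) data;
       return false;		// Keep walking; returning true stops the walk.
     }

     unsigned n_clobbers = 0;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), count_clobber, &n_clobbers,
			 NULL, NULL, 100);

   counts, up to the limit of 100 walked statements, the vdefs whose
   defining statements may clobber R.  */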
3879
3880 static int
3881 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3882 bool (*walker)(ao_ref *, tree, void *), void *data,
3883 bitmap *visited, unsigned int cnt,
3884 bool *function_entry_reached, unsigned limit)
3885 {
3886 do
3887 {
3888 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3889
3890 if (*visited
3891 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3892 return cnt;
3893
3894 if (gimple_nop_p (def_stmt))
3895 {
3896 if (function_entry_reached)
3897 *function_entry_reached = true;
3898 return cnt;
3899 }
3900 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3901 {
3902 unsigned i;
3903 if (!*visited)
3904 *visited = BITMAP_ALLOC (NULL);
3905 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3906 {
3907 int res = walk_aliased_vdefs_1 (ref,
3908 gimple_phi_arg_def (def_stmt, i),
3909 walker, data, visited, cnt,
3910 function_entry_reached, limit);
3911 if (res == -1)
3912 return -1;
3913 cnt = res;
3914 }
3915 return cnt;
3916 }
3917
3918 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3919 cnt++;
3920 if (cnt == limit)
3921 return -1;
3922 if ((!ref
3923 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3924 && (*walker) (ref, vdef, data))
3925 return cnt;
3926
3927 vdef = gimple_vuse (def_stmt);
3928 }
3929 while (1);
3930 }
3931
3932 int
3933 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3934 bool (*walker)(ao_ref *, tree, void *), void *data,
3935 bitmap *visited,
3936 bool *function_entry_reached, unsigned int limit)
3937 {
3938 bitmap local_visited = NULL;
3939 int ret;
3940
3941 timevar_push (TV_ALIAS_STMT_WALK);
3942
3943 if (function_entry_reached)
3944 *function_entry_reached = false;
3945
3946 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3947 visited ? visited : &local_visited, 0,
3948 function_entry_reached, limit);
3949 if (local_visited)
3950 BITMAP_FREE (local_visited);
3951
3952 timevar_pop (TV_ALIAS_STMT_WALK);
3953
3954 return ret;
3955 }
3956
3957 /* Verify validity of the fnspec string.
3958 See attr-fnspec.h for details. */
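/* As a sketch of what passes this verification (assuming the usual
   two-character return and argument descriptors from attr-fnspec.h),
   a string such as ". R " has a valid length, a valid return specifier
   ". " and one argument descriptor "R " with no size argument; the
   exact meaning of the letters is documented in attr-fnspec.h.  */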
3959
3960 void
3961 attr_fnspec::verify ()
3962 {
3963 bool err = false;
3964 if (!len)
3965 return;
3966
3967 /* Check return value specifier. */
3968 if (len < return_desc_size)
3969 err = true;
3970 else if ((len - return_desc_size) % arg_desc_size)
3971 err = true;
3972 else if ((str[0] < '1' || str[0] > '4')
3973 && str[0] != '.' && str[0] != 'm')
3974 err = true;
3975
3976 switch (str[1])
3977 {
3978 case ' ':
3979 case 'p':
3980 case 'P':
3981 case 'c':
3982 case 'C':
3983 break;
3984 default:
3985 err = true;
3986 }
3987 if (err)
3988 internal_error ("invalid fn spec attribute \"%s\"", str);
3989
3990 /* Now check all parameters. */
3991 for (unsigned int i = 0; arg_specified_p (i); i++)
3992 {
3993 unsigned int idx = arg_idx (i);
3994 switch (str[idx])
3995 {
3996 case 'x':
3997 case 'X':
3998 case 'r':
3999 case 'R':
4000 case 'o':
4001 case 'O':
4002 case 'w':
4003 case 'W':
4004 case '.':
4005 if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
4006 || str[idx + 1] == 't')
4007 {
4008 if (str[idx] != 'r' && str[idx] != 'R'
4009 && str[idx] != 'w' && str[idx] != 'W'
4010 && str[idx] != 'o' && str[idx] != 'O')
4011 err = true;
4012 if (str[idx + 1] != 't'
4013 /* Size specified is scalar, so it should be described
4014 by ". " if specified at all. */
4015 && (arg_specified_p (str[idx + 1] - '1')
4016 && str[arg_idx (str[idx + 1] - '1')] != '.'))
4017 err = true;
4018 }
4019 else if (str[idx + 1] != ' ')
4020 err = true;
4021 break;
4022 default:
4023 if (str[idx] < '1' || str[idx] > '9')
4024 err = true;
4025 }
4026 if (err)
4027 internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
4028 }
4029 }
4030
4031 /* Return true if TYPE1 and TYPE2 will always give the same answer
4032 when compared with other types using same_type_for_tbaa_p. */
4033
4034 static bool
4035 types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
4036 bool lto_streaming_safe)
4037 {
4038 /* We use same_type_for_tbaa_p to match types in the access path.
4039 This check is overly conservative. */
4040 type1 = TYPE_MAIN_VARIANT (type1);
4041 type2 = TYPE_MAIN_VARIANT (type2);
4042
4043 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
4044 != TYPE_STRUCTURAL_EQUALITY_P (type2))
4045 return false;
4046 if (TYPE_STRUCTURAL_EQUALITY_P (type1))
4047 return true;
4048
4049 if (lto_streaming_safe)
4050 return type1 == type2;
4051 else
4052 return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
4053 }
4054
4055 /* Compare REF1 and REF2 and return flags specifying their differences.
4056 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4057 types that are going to be recomputed.
4058 If TBAA is true also compare TBAA metadata. */
4059
4060 int
4061 ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4062 bool lto_streaming_safe,
4063 bool tbaa)
4064 {
4065 if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4066 return SEMANTICS;
4067 tree base1 = ao_ref_base (ref1);
4068 tree base2 = ao_ref_base (ref2);
4069
4070 if (!known_eq (ref1->offset, ref2->offset)
4071 || !known_eq (ref1->size, ref2->size)
4072 || !known_eq (ref1->max_size, ref2->max_size))
4073 return SEMANTICS;
4074
4075 /* For variable accesses we need to compare the actual access paths
4076 to check that both refs access the same address and have the same size. */
4077 if (!known_eq (ref1->size, ref1->max_size))
4078 {
4079 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4080 TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
4081 return SEMANTICS;
4082 tree r1 = ref1->ref;
4083 tree r2 = ref2->ref;
4084
4085 /* Handle toplevel COMPONENT_REFs of bitfields.
4086 Those are special since they are not allowed in
4087 ADDR_EXPR. */
4088 if (TREE_CODE (r1) == COMPONENT_REF
4089 && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4090 {
4091 if (TREE_CODE (r2) != COMPONENT_REF
4092 || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4093 return SEMANTICS;
4094 tree field1 = TREE_OPERAND (r1, 1);
4095 tree field2 = TREE_OPERAND (r2, 1);
4096 if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4097 DECL_FIELD_OFFSET (field2), 0)
4098 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4099 DECL_FIELD_BIT_OFFSET (field2), 0)
4100 || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
4101 || !types_compatible_p (TREE_TYPE (r1),
4102 TREE_TYPE (r2)))
4103 return SEMANTICS;
4104 r1 = TREE_OPERAND (r1, 0);
4105 r2 = TREE_OPERAND (r2, 0);
4106 }
4107 else if (TREE_CODE (r2) == COMPONENT_REF
4108 && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4109 return SEMANTICS;
4110
4111 /* Similarly for bit field refs. */
4112 if (TREE_CODE (r1) == BIT_FIELD_REF)
4113 {
4114 if (TREE_CODE (r2) != BIT_FIELD_REF
4115 || !operand_equal_p (TREE_OPERAND (r1, 1),
4116 TREE_OPERAND (r2, 1), 0)
4117 || !operand_equal_p (TREE_OPERAND (r1, 2),
4118 TREE_OPERAND (r2, 2), 0)
4119 || !types_compatible_p (TREE_TYPE (r1),
4120 TREE_TYPE (r2)))
4121 return SEMANTICS;
4122 r1 = TREE_OPERAND (r1, 0);
4123 r2 = TREE_OPERAND (r2, 0);
4124 }
4125 else if (TREE_CODE (r2) == BIT_FIELD_REF)
4126 return SEMANTICS;
4127
4128 /* Now we can compare the address of actual memory access. */
4129 if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4130 return SEMANTICS;
4131 }
4132 /* For constant accesses we get more matches by comparing offset only. */
4133 else if (!operand_equal_p (base1, base2,
4134 OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4135 return SEMANTICS;
4136
4137 /* We can't simply use get_object_alignment_1 on the full
4138 reference as for accesses with variable indexes this reports
4139 too conservative alignment. */
4140 unsigned int align1, align2;
4141 unsigned HOST_WIDE_INT bitpos1, bitpos2;
4142 bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4143 bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4144 /* ??? For MEM_REF get_object_alignment_1 determines alignment from
4145 TYPE_ALIGN but still returns false. This seems to contradict
4146 its description. So compare even if alignment is unknown. */
4147 if (known1 != known2
4148 || (bitpos1 != bitpos2 || align1 != align2))
4149 return SEMANTICS;
4150
4151 /* Now we know that the accesses are semantically the same. */
4152 int flags = 0;
4153
4154 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4155 tree rbase1 = ref1->ref;
4156 if (rbase1)
4157 while (handled_component_p (rbase1))
4158 rbase1 = TREE_OPERAND (rbase1, 0);
4159 tree rbase2 = ref2->ref;
4160 while (handled_component_p (rbase2))
4161 rbase2 = TREE_OPERAND (rbase2, 0);
4162
4163 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4164 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4165 Otherwise we need to match bases and cliques. */
4166 if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4167 && MR_DEPENDENCE_CLIQUE (rbase1))
4168 || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4169 && MR_DEPENDENCE_CLIQUE (rbase2)))
4170 && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4171 || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4172 || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4173 flags |= DEPENDENCE_CLIQUE;
4174
4175 if (!tbaa)
4176 return flags;
4177
4178 /* Alias sets are not stable across LTO streaming; be conservative here
4179 and compare types the alias sets are ultimately based on. */
4180 if (lto_streaming_safe)
4181 {
4182 tree t1 = ao_ref_alias_ptr_type (ref1);
4183 tree t2 = ao_ref_alias_ptr_type (ref2);
4184 if (!alias_ptr_types_compatible_p (t1, t2))
4185 flags |= REF_ALIAS_SET;
4186
4187 t1 = ao_ref_base_alias_ptr_type (ref1);
4188 t2 = ao_ref_base_alias_ptr_type (ref2);
4189 if (!alias_ptr_types_compatible_p (t1, t2))
4190 flags |= BASE_ALIAS_SET;
4191 }
4192 else
4193 {
4194 if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4195 flags |= REF_ALIAS_SET;
4196 if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4197 flags |= BASE_ALIAS_SET;
4198 }
4199
4200 /* Access path is used only on non-view-converted references. */
4201 bool view_converted = view_converted_memref_p (rbase1);
4202 if (view_converted_memref_p (rbase2) != view_converted)
4203 return flags | ACCESS_PATH;
4204 else if (view_converted)
4205 return flags;
4206
4207
4208 /* Find start of access paths and look for trailing arrays. */
4209 tree c1 = ref1->ref, c2 = ref2->ref;
4210 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4211 int nskipped1 = 0, nskipped2 = 0;
4212 int i = 0;
4213
4214 for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4215 {
4216 if (component_ref_to_zero_sized_trailing_array_p (p1))
4217 end_struct_ref1 = p1;
4218 if (ends_tbaa_access_path_p (p1))
4219 c1 = p1, nskipped1 = i;
4220 i++;
4221 }
/* Reset the component counter so nskipped2 counts components of
   ref2->ref only. */
i = 0;
4222 for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4223 {
4224 if (component_ref_to_zero_sized_trailing_array_p (p2))
4225 end_struct_ref2 = p2;
4226 if (ends_tbaa_access_path_p (p2))
4227 c2 = p2, nskipped2 = i;
4228 i++;
4229 }
4230
4231 /* For variable accesses we cannot rely on the offset match below.
4232 We know that the paths are structurally the same, so only check that
4233 the starts of the TBAA paths did not diverge. */
4234 if (!known_eq (ref1->size, ref1->max_size)
4235 && nskipped1 != nskipped2)
4236 return flags | ACCESS_PATH;
4237
4238 /* Information about trailing refs is used by
4239 aliasing_component_refs_p, which is applied only if the paths
4240 have handled components. */
4241 if (!handled_component_p (c1) && !handled_component_p (c2))
4242 ;
4243 else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4244 return flags | ACCESS_PATH;
4245 if (end_struct_ref1
4246 && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
4247 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
4248 return flags | ACCESS_PATH;
4249
4250 /* Now compare all handled components of the access path.
4251 We have three oracles that care about access paths:
4252 - aliasing_component_refs_p
4253 - nonoverlapping_refs_since_match_p
4254 - nonoverlapping_component_refs_p
4255 We need to match things these oracles compare.
4256
4257 It is only necessary to check types for compatibility
4258 and offsets. The rest of what the oracles compare are actual
4259 addresses, which are already known to be the same:
4260 - for constant accesses we check offsets
4261 - for variable accesses we already matched
4262 the path lexically with operand_equal_p. */
4263 while (true)
4264 {
4265 bool comp1 = handled_component_p (c1);
4266 bool comp2 = handled_component_p (c2);
4267
4268 if (comp1 != comp2)
4269 return flags | ACCESS_PATH;
4270 if (!comp1)
4271 break;
4272
4273 if (TREE_CODE (c1) != TREE_CODE (c2))
4274 return flags | ACCESS_PATH;
4275
4276 /* aliasing_component_refs_p attempts to find a type match within
4277 the paths. For that reason both types need to be equal
4278 with respect to same_type_for_tbaa_p. */
4279 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4280 TREE_TYPE (c2),
4281 lto_streaming_safe))
4282 return flags | ACCESS_PATH;
4283 if (component_ref_to_zero_sized_trailing_array_p (c1)
4284 != component_ref_to_zero_sized_trailing_array_p (c2))
4285 return flags | ACCESS_PATH;
4286
4287 /* aliasing_matching_component_refs_p compares
4288 offsets within the path. Other properties are ignored.
4289 Do not bother to verify offsets in variable accesses. Here we
4290 already compared them by operand_equal_p so they are
4291 structurally the same. */
4292 if (!known_eq (ref1->size, ref1->max_size))
4293 {
4294 poly_int64 offadj1, sztmc1, msztmc1;
4295 bool reverse1;
4296 get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4297 poly_int64 offadj2, sztmc2, msztmc2;
4298 bool reverse2;
4299 get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4300 if (!known_eq (offadj1, offadj2))
4301 return flags | ACCESS_PATH;
4302 }
4303 c1 = TREE_OPERAND (c1, 0);
4304 c2 = TREE_OPERAND (c2, 0);
4305 }
4306 /* Finally test the access type. */
4307 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4308 TREE_TYPE (c2),
4309 lto_streaming_safe))
4310 return flags | ACCESS_PATH;
4311 return flags;
4312 }
4313
4314 /* Hash REF to HSTATE. If LTO_STREAMING_SAFE do not use alias sets
4315 and canonical types. */
4316 void
4317 ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4318 inchash::hash &hstate)
4319 {
4320 tree base = ao_ref_base (ref);
4321 tree tbase = base;
4322
4323 if (!known_eq (ref->size, ref->max_size))
4324 {
4325 tree r = ref->ref;
4326 if (TREE_CODE (r) == COMPONENT_REF
4327 && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4328 {
4329 tree field = TREE_OPERAND (r, 1);
4330 hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4331 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4332 hash_operand (DECL_SIZE (field), hstate, 0);
4333 r = TREE_OPERAND (r, 0);
4334 }
4335 if (TREE_CODE (r) == BIT_FIELD_REF)
4336 {
4337 hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4338 hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4339 r = TREE_OPERAND (r, 0);
4340 }
4341 hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4342 hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4343 }
4344 else
4345 {
4346 hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4347 hstate.add_poly_int (ref->offset);
4348 hstate.add_poly_int (ref->size);
4349 hstate.add_poly_int (ref->max_size);
4350 }
4351 if (!lto_streaming_safe && tbaa)
4352 {
4353 hstate.add_int (ao_ref_alias_set (ref));
4354 hstate.add_int (ao_ref_base_alias_set (ref));
4355 }
4356 }
4357