/* Tree based points-to analysis
   Copyright (C) 2005-2022 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stmt.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "gimple-walk.h"
#include "varasm.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-ssa.h"
#include "tree-cfg.h"
#include "gimple-range.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"

/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by "David
   J. Pearce and Paul H. J. Kelly and Chris Hankin", at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.
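
   For illustration (a simplified sketch of the mapping, not an
   exhaustive list), the four basic pointer assignments give rise to
   constraint expressions as follows:

     p = &a;     SCALAR p = ADDRESSOF a
     p = q;      SCALAR p = SCALAR q
     p = *q;     SCALAR p = DEREF q
     *p = q;     DEREF p  = SCALAR q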

   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

   1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
   3. "offset", that tells the offset in bits from the beginning of the
   structure to this field.

   Thus,
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL


   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P)

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph, or the
   appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.
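
   As a small worked example of steps 1-8 (a sketch; the real
   implementation also performs the optimizations just mentioned),
   consider

     p = &a;  q = p;  *q = &b;

   Step 3 puts a into Sol(p).  Step 5 adds a copy edge p -> q, so
   propagation in step 6 yields Sol(q) = {a}.  The complex constraint
   *q = &b is processed in step 7: for the member a of Sol(q), b is
   added to Sol(a).  Nothing changes in the next iteration, so the
   solver stops with Sol(p) = Sol(q) = {a} and Sol(a) = {b}.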

   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */

/* IPA-PTA optimizations possible.

   When the indirect function called is ANYTHING we can add disambiguation
   based on the function signatures (or simply the parameter count which
   is the varinfo size).  We also do not need to consider functions that
   do not have their address taken.

   The is_global_var bit which marks escape points is overly conservative
   in IPA mode.  Split it to is_escape_point and is_global_var - only
   externally visible globals are escape points in IPA mode.
   There is now is_ipa_escape_point but this is only used in a few
   selected places.

   The way we introduce DECL_PT_UID to avoid fixing up all points-to
   sets in the translation unit when we copy a DECL during inlining
   pessimizes precision.  The advantage is that the DECL_PT_UID keeps
   compile-time and memory usage overhead low - the points-to sets
   do not grow or get unshared as they would during a fixup phase.
   An alternative solution is to delay IPA PTA until after all
   inlining transformations have been applied.

   The way we propagate clobber/use information isn't optimized.
   It should use a new complex constraint that properly filters
   out local variables of the callee (though that would make
   the sets invalid after inlining).  OTOH we might as well
   admit defeat to WHOPR and simply do all the clobber/use analysis
   and propagation after PTA finished but before we threw away
   points-to information for memory variables.  WHOPR and PTA
   do not play along well anyway - the whole constraint solving
   would need to be done in WPA phase and it will be very interesting
   to apply the results to local SSA names during LTRANS phase.

   We probably should compute a per-function unit-ESCAPE solution
   propagating it simply like the clobber / uses solutions.  The
   solution can go alongside the non-IPA escaped solution and be
   used to query which vars escape the unit through a function.
   This is also required to make the escaped-HEAP trick work in IPA mode.

   We never put function decls in points-to sets so we do not
   keep the set of called functions for indirect calls.

   And probably more.  */

static bool use_field_sensitive = true;
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps.  */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables.  */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps.  */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *, bool);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
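
/* For example (a usage sketch; the body shown is a placeholder):

     EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
       {
         ... process successor i ...
       }

   iterates over the successors of node N only when the successor
   bitmap is non-NULL, and degrades to a no-op otherwise.  */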

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;

struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable.  */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis.  */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents an IPA function info.  */
  unsigned int is_fn_info : 1;

  /* True if this appears as RHS in an ADDRESSOF constraint.  */
  unsigned int address_taken : 1;

  /* ???  Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively.  Zero if none is needed.  */
  unsigned int shadow_var_uid;

  /* Name of this variable  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);
static void make_param_constraints (varinfo_t);

/* Pool of variable info structures.  */
static object_allocator<variable_info> variable_info_pool
  ("Variable info pool");

/* Map varinfo to final pt_solution.  */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;

/* Return the varmap element N */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}

/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable.  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}

/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };

/* Return a new variable info structure for a variable named NAME,
   associated with tree T (or NULL_TREE for an artificial variable).
   Append it to the vector of variable info structures.  */

static varinfo_t
new_var_info (tree t, const char *name, bool add_id)
{
  unsigned index = varmap.length ();
  varinfo_t ret = variable_info_pool.allocate ();

  if (dump_file && add_id)
    {
      char *tempname = xasprintf ("%s(%d)", name, index);
      name = ggc_strdup (tempname);
      free (tempname);
    }

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_restrict_var = false;
  ret->ruid = 0;
  ret->is_global_var = (t == NULL_TREE);
  ret->is_ipa_escape_point = false;
  ret->is_fn_info = false;
  ret->address_taken = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points.  */
			  || (VAR_P (t) && DECL_HARD_REGISTER (t)));
  ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = NULL;
  ret->next = 0;
  ret->shadow_var_uid = 0;
  ret->head = ret->id;

  stats.total_vars++;

  varmap.safe_push (ret);

  return ret;
}

/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
static hash_map<gimple *, varinfo_t> *call_stmt_vars;

/* Lookup or create the variable for the call statement CALL.  */

static varinfo_t
get_call_vi (gcall *call)
{
  varinfo_t vi, vi2;

  bool existed;
  varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
  if (existed)
    return *slot_p;

  vi = new_var_info (NULL_TREE, "CALLUSED", true);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;
  vi->is_reg_var = true;

  vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;
  vi2->is_reg_var = true;

  vi->next = vi2->id;

  *slot_p = vi;
  return vi;
}

/* Lookup the variable for the call statement CALL representing
   the uses.  Returns NULL if there is nothing special about this call.  */

static varinfo_t
lookup_call_use_vi (gcall *call)
{
  varinfo_t *slot_p = call_stmt_vars->get (call);
  if (slot_p)
    return *slot_p;

  return NULL;
}

/* Lookup the variable for the call statement CALL representing
   the clobbers.  Returns NULL if there is nothing special about this call.  */

static varinfo_t
lookup_call_clobber_vi (gcall *call)
{
  varinfo_t uses = lookup_call_use_vi (call);
  if (!uses)
    return NULL;

  return vi_next (uses);
}

/* Lookup or create the variable for the call statement CALL representing
   the uses.  */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}

/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gcall *call)
{
  return vi_next (get_call_vi (call));
}


enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
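
/* For illustration (a sketch of the encoding, not code from this
   file): the load x = *(y + 8), with 8 a byte offset, would use
   lhs = { SCALAR, <id of x>, 0 } and rhs = { DEREF, <id of y>, 64 },
   since offsets are tracked in bits.  A dereference at an offset that
   is not known at compile time uses UNKNOWN_OFFSET instead.  */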

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);

/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
static object_allocator<constraint> constraint_pool ("Constraint pool");

/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
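
/* For example (a sketch): with four variables FIRST_REF_NODE is 4,
   node 4 + i represents *i, the dereference of the variable with
   id i, and LAST_REF_NODE is 7.  */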

/* Return the representative node for NODE, if NODE has been unioned
   with another NODE.
   This function performs path compression along the way to finding
   the representative.  */

static unsigned int
find (unsigned int node)
{
  gcc_checking_assert (node < graph->size);
  if (graph->rep[node] != node)
    return graph->rep[node] = find (graph->rep[node]);
  return node;
}

/* Union the FROM node into the TO node.
   Note that at some point in the future, we may want to do
   union-by-rank, in which case we are going to have to return the
   node we unified to.  */

static bool
unite (unsigned int to, unsigned int from)
{
  gcc_checking_assert (to < graph->size && from < graph->size);
  if (to != from && graph->rep[from] != to)
    {
      graph->rep[from] = to;
      return true;
    }
  return false;
}
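
/* As a sketch of the union-find behavior: after unite (3, 5) and
   unite (1, 3) the representative chain is 5 -> 3 -> 1, and the first
   find (5) both returns 1 and compresses the chain so that
   rep[5] == rep[3] == 1.  */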

/* Create a new constraint consisting of LHS and RHS expressions.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = constraint_pool.allocate ();
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}

/* Print out constraint C to FILE.  */

static void
dump_constraint (FILE *file, constraint_t c)
{
  if (c->lhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->lhs.type == DEREF)
    fprintf (file, "*");
  if (dump_file)
    fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
  else
    fprintf (file, "V%d", c->lhs.var);
  if (c->lhs.offset == UNKNOWN_OFFSET)
    fprintf (file, " + UNKNOWN");
  else if (c->lhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
  fprintf (file, " = ");
  if (c->rhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->rhs.type == DEREF)
    fprintf (file, "*");
  if (dump_file)
    fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
  else
    fprintf (file, "V%d", c->rhs.var);
  if (c->rhs.offset == UNKNOWN_OFFSET)
    fprintf (file, " + UNKNOWN");
  else if (c->rhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
}


void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
void debug_varinfo (varinfo_t);
void debug_varmap (void);

/* Print out constraint C to stderr.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}

/* Print out all constraints to FILE */

static void
dump_constraints (FILE *file, int from)
{
  int i;
  constraint_t c;
  for (i = from; constraints.iterate (i, &c); i++)
    if (c)
      {
	dump_constraint (file, c);
	fprintf (file, "\n");
      }
}

/* Print out all constraints to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}

/* Print the constraint graph in dot format.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}

/* Print out the constraint graph to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}

/* SOLVER FUNCTIONS

   The solver is a simple worklist solver, that works on the following
   algorithm:

   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   } */

/* Return true if two constraint expressions A and B are equal.  */

static bool
constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
{
  return a.type == b.type && a.var == b.var && a.offset == b.offset;
}

/* Return true if constraint expression A is less than constraint expression
   B.  This is just arbitrary, but consistent, in order to give them an
   ordering.  */

static bool
constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
{
  if (a.type == b.type)
    {
      if (a.var == b.var)
	return a.offset < b.offset;
      else
	return a.var < b.var;
    }
  else
    return a.type < b.type;
}

/* Return true if constraint A is less than constraint B.  This is just
   arbitrary, but consistent, in order to give them an ordering.  */

static bool
constraint_less (const constraint_t &a, const constraint_t &b)
{
  if (constraint_expr_less (a->lhs, b->lhs))
    return true;
  else if (constraint_expr_less (b->lhs, a->lhs))
    return false;
  else
    return constraint_expr_less (a->rhs, b->rhs);
}

/* Return true if two constraints A and B are equal.  */

static bool
constraint_equal (struct constraint a, struct constraint b)
{
  return constraint_expr_equal (a.lhs, b.lhs)
    && constraint_expr_equal (a.rhs, b.rhs);
}


/* Find a constraint LOOKFOR in the sorted constraint vector VEC */

static constraint_t
constraint_vec_find (vec<constraint_t> vec,
		     struct constraint lookfor)
{
  unsigned int place;
  constraint_t found;

  if (!vec.exists ())
    return NULL;

  place = vec.lower_bound (&lookfor, constraint_less);
  if (place >= vec.length ())
    return NULL;
  found = vec[place];
  if (!constraint_equal (*found, lookfor))
    return NULL;
  return found;
}

/* Union two constraint vectors, TO and FROM.  Put the result in TO.
   Returns true if the TO set is changed.  */

static bool
constraint_set_union (vec<constraint_t> *to,
		      vec<constraint_t> *from)
{
  int i;
  constraint_t c;
  bool any_change = false;

  FOR_EACH_VEC_ELT (*from, i, c)
    {
      if (constraint_vec_find (*to, *c) == NULL)
	{
	  unsigned int place = to->lower_bound (c, constraint_less);
	  to->safe_insert (place, c);
	  any_change = true;
	}
    }
  return any_change;
}

/* Expands the solution in SET to all sub-fields of variables included.  */

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (&iteration_obstack);

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields of.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (*expanded, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->head != j)
	continue;
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (*expanded, v->id);
    }

  /* And finally set the rest of the bits from SET.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
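
/* For example (a sketch using the struct f layout from the comment at
   the top of the file): if SET contains only foo.b, the first pass
   records the head foo.a in EXPANDED, the second pass walks the
   sub-variable chain of foo.a and adds every field, and the final IOR
   re-adds the members of SET itself, so the result covers all fields
   of foo.  */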

/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  */

static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
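
/* For example (a sketch, again using the struct f layout from the top
   of the file): with DELTA = { foo.a } and INC = 32, the lookup for
   foo.a happens at field offset 0 + 32, which finds foo.b, so foo.b
   is added to TO.  This conservatively models an increment of a
   pointer to foo.a by 4 bytes when int is 32 bits wide.  */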

/* Insert constraint C into the list of complex constraints for graph
   node VAR.  */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  vec<constraint_t> complex = graph->complex[var];
  unsigned int place = complex.lower_bound (c, constraint_less);

  /* Only insert constraints that do not already exist.  */
  if (place >= complex.length ()
      || !constraint_equal (*c, *complex[place]))
    graph->complex[var].safe_insert (place, c);
}


/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge.  */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from the FROM node into the TO node.  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offsetted constraint which is
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;

    }
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}


/* Remove edges involving NODE from GRAPH.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}

/* Merge GRAPH nodes FROM and TO into node TO.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}


/* Add an indirect graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  if (to == from)
    return;

  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

  if (bitmap_set_bit (graph->implicit_preds[to], from))
    stats.num_implicit_edges++;
}

/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}

/* Add a graph edge to GRAPH, going from FROM to TO if
   it doesn't exist in the graph already.
   Return false if the edge already existed, true otherwise.  */

static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    {
      return false;
    }
  else
    {
      bool r = false;

      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);

      /* The graph solving process does not avoid "triangles", thus
	 there can be multiple paths from a node to another involving
	 intermediate other nodes.  That causes extra copying which is
	 most difficult to avoid when the intermediate node is ESCAPED
	 because there are no edges added from ESCAPED.  Avoid
	 adding the direct edge FROM -> TO when we have FROM -> ESCAPED
	 and TO contains ESCAPED.
	 ??? Note this is only a heuristic, it does not prevent the
	 situation from occurring.  The heuristic helps PR38474 and
	 PR99912 significantly.  */
      if (to < FIRST_REF_NODE
	  && bitmap_bit_p (graph->succs[from], find (escaped_id))
	  && bitmap_bit_p (get_varinfo (find (to))->solution, escaped_id))
	return false;

      if (bitmap_set_bit (graph->succs[from], to))
	{
	  r = true;
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	}
      return r;
    }
}


/* Initialize the constraint graph structure to contain SIZE nodes.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}

/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}

/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}


/* Changed variables on the last iteration.  */
static bitmap changed;

/* Strongly Connected Component visitation info.  */

class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  auto_sbitmap visited;
  auto_sbitmap deleted;
  unsigned int *dfs;
  unsigned int *node_mapping;
  int current_index;
  auto_vec<unsigned> scc_stack;
};


/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}

/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}

/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}

/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1773
1774 /* Handle a non-simple (simple meaning requires no iteration),
1775 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1776
1777 static void
do_complex_constraint(constraint_graph_t graph,constraint_t c,bitmap delta,bitmap * expanded_delta)1778 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1779 bitmap *expanded_delta)
1780 {
1781 if (c->lhs.type == DEREF)
1782 {
1783 if (c->rhs.type == ADDRESSOF)
1784 {
1785 gcc_unreachable ();
1786 }
1787 else
1788 {
1789 /* *x = y */
1790 do_ds_constraint (c, delta, expanded_delta);
1791 }
1792 }
1793 else if (c->rhs.type == DEREF)
1794 {
1795 /* x = *y */
1796 if (!(get_varinfo (c->lhs.var)->is_special_var))
1797 do_sd_constraint (graph, c, delta, expanded_delta);
1798 }
1799 else
1800 {
1801 bitmap tmp;
1802 bool flag = false;
1803
1804 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1805 && c->rhs.offset != 0 && c->lhs.offset == 0);
1806 tmp = get_varinfo (c->lhs.var)->solution;
1807
1808 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1809 expanded_delta);
1810
1811 if (flag)
1812 bitmap_set_bit (changed, c->lhs.var);
1813 }
1814 }
1815
1816 /* Initialize a new SCC info structure. */
1817
1818 scc_info::scc_info (size_t size) :
1819 visited (size), deleted (size), current_index (0), scc_stack (1)
1820 {
1821 bitmap_clear (visited);
1822 bitmap_clear (deleted);
1823 node_mapping = XNEWVEC (unsigned int, size);
1824 dfs = XCNEWVEC (unsigned int, size);
1825
1826 for (size_t i = 0; i < size; i++)
1827 node_mapping[i] = i;
1828 }
1829
1830 /* Free an SCC info structure. */
1831
1832 scc_info::~scc_info ()
1833 {
1834 free (node_mapping);
1835 free (dfs);
1836 }
1837
1838
1839 /* Find indirect cycles in GRAPH, using strongly connected
1840 components, and note them in the indirect cycles map.
1841
1842 This technique comes from Ben Hardekopf and Calvin Lin,
1843 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1844 Lines of Code", submitted to PLDI 2007. */
1845
1846 static void
1847 find_indirect_cycles (constraint_graph_t graph)
1848 {
1849 unsigned int i;
1850 unsigned int size = graph->size;
1851 scc_info si (size);
1852
1853 for (i = 0; i < MIN (LAST_REF_NODE, size); i++)
1854 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1855 scc_visit (graph, &si, i);
1856 }
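
/* For illustration only (not part of GCC): the Tarjan-style SCC walk
   that scc_visit performs, sketched on a plain adjacency-list graph
   instead of GCC's bitmap-based constraint graph.  All names below are
   hypothetical; GCC's version additionally unifies REF nodes and
   records the discovered indirect cycles.  */

#include <algorithm>
#include <vector>

struct toy_scc
{
  const std::vector<std::vector<unsigned> > &succs;
  std::vector<unsigned> dfs, scc_id;
  std::vector<bool> visited, deleted;
  std::vector<unsigned> stack;
  unsigned index, n_sccs;

  toy_scc (const std::vector<std::vector<unsigned> > &g)
    : succs (g), dfs (g.size ()), scc_id (g.size ()),
      visited (g.size ()), deleted (g.size ()), index (0), n_sccs (0) {}

  void visit (unsigned n)
  {
    visited[n] = true;
    unsigned my_dfs = dfs[n] = index++;
    for (unsigned w : succs[n])
      {
        if (deleted[w])         /* Edge into an already finished SCC.  */
          continue;
        if (!visited[w])
          visit (w);
        dfs[n] = std::min (dfs[n], dfs[w]);   /* Low-link update.  */
      }
    if (dfs[n] == my_dfs)       /* N is the root of an SCC: pop it.  */
      {
        while (!stack.empty () && dfs[stack.back ()] >= my_dfs)
          {
            scc_id[stack.back ()] = n_sccs;
            deleted[stack.back ()] = true;
            stack.pop_back ();
          }
        scc_id[n] = n_sccs++;
        deleted[n] = true;
      }
    else
      stack.push_back (n);
  }
};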
1857
1858 /* Visit the graph in topological order starting at node N, and store the
1859 order in TOPO_ORDER using VISITED to indicate visited nodes. */
1860
1861 static void
1862 topo_visit (constraint_graph_t graph, vec<unsigned> &topo_order,
1863 sbitmap visited, unsigned int n)
1864 {
1865 bitmap_iterator bi;
1866 unsigned int j;
1867
1868 bitmap_set_bit (visited, n);
1869
1870 if (graph->succs[n])
1871 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1872 {
1873 unsigned k = find (j);
1874 if (!bitmap_bit_p (visited, k))
1875 topo_visit (graph, topo_order, visited, k);
1876 }
1877
1878 topo_order.quick_push (n);
1879 }
1880
1881 /* Compute a topological ordering for GRAPH, and return the result. */
1882
1883 static auto_vec<unsigned>
1884 compute_topo_order (constraint_graph_t graph)
1885 {
1886 unsigned int i;
1887 unsigned int size = graph->size;
1888
1889 auto_sbitmap visited (size);
1890 bitmap_clear (visited);
1891
1892 /* For the heuristic in add_graph_edge to work optimally make sure to
1893 first visit the connected component of the graph containing
1894 ESCAPED. Do this by extracting that component separately and
1895 appending it after all other components, since solve_graph
1896 pops from the end of the order. */
1897 auto_vec<unsigned> tail (size);
1898 topo_visit (graph, tail, visited, find (escaped_id));
1899
1900 auto_vec<unsigned> topo_order (size);
1901
1902 for (i = 0; i != size; ++i)
1903 if (!bitmap_bit_p (visited, i) && find (i) == i)
1904 topo_visit (graph, topo_order, visited, i);
1905
1906 topo_order.splice (tail);
1907 return topo_order;
1908 }
1909
1910 /* Structure used for hash value numbering of pointer equivalence
1911 classes. */
1912
1913 typedef struct equiv_class_label
1914 {
1915 hashval_t hashcode;
1916 unsigned int equivalence_class;
1917 bitmap labels;
1918 } *equiv_class_label_t;
1919 typedef const struct equiv_class_label *const_equiv_class_label_t;
1920
1921 /* Equiv_class_label hashtable helpers. */
1922
1923 struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
1924 {
1925 static inline hashval_t hash (const equiv_class_label *);
1926 static inline bool equal (const equiv_class_label *,
1927 const equiv_class_label *);
1928 };
1929
1930 /* Hash function for an equiv_class_label_t. */
1931
1932 inline hashval_t
1933 equiv_class_hasher::hash (const equiv_class_label *ecl)
1934 {
1935 return ecl->hashcode;
1936 }
1937
1938 /* Equality function for two equiv_class_label_t's. */
1939
1940 inline bool
1941 equiv_class_hasher::equal (const equiv_class_label *eql1,
1942 const equiv_class_label *eql2)
1943 {
1944 return (eql1->hashcode == eql2->hashcode
1945 && bitmap_equal_p (eql1->labels, eql2->labels));
1946 }
1947
1948 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1949 classes. */
1950 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1951
1952 /* A hashtable for mapping a bitmap of labels->location equivalence
1953 classes. */
1954 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1955
1956 struct obstack equiv_class_obstack;
1957
1958 /* Look up the equivalence class in TABLE for the bitmap of LABELS it
1959 contains, inserting a new entry with equivalence class zero if none
1960 exists. Return the entry found or created. */
1961
1962 static equiv_class_label *
1963 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1964 bitmap labels)
1965 {
1966 equiv_class_label **slot;
1967 equiv_class_label ecl;
1968
1969 ecl.labels = labels;
1970 ecl.hashcode = bitmap_hash (labels);
1971 slot = table->find_slot (&ecl, INSERT);
1972 if (!*slot)
1973 {
1974 *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
1975 (*slot)->labels = labels;
1976 (*slot)->hashcode = ecl.hashcode;
1977 (*slot)->equivalence_class = 0;
1978 }
1979
1980 return *slot;
1981 }
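
/* For illustration only (not part of GCC): the hash-consing idiom that
   equiv_class_lookup_or_add implements, written with standard
   containers.  A set of labels maps to a canonical class id; the GCC
   version defers assigning the class id to the caller, while this
   sketch assigns it eagerly.  Hypothetical names.  */

#include <map>
#include <set>

static std::map<std::set<unsigned>, unsigned> toy_class_table;
static unsigned toy_next_class = 1;

static unsigned
toy_class_for_labels (const std::set<unsigned> &labels)
{
  /* emplace inserts {labels, 0} only if LABELS is not present yet.  */
  auto slot = toy_class_table.emplace (labels, 0).first;
  if (slot->second == 0)
    slot->second = toy_next_class++;    /* First time seen: new class.  */
  return slot->second;
}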
1982
1983 /* Perform offline variable substitution.
1984
1985 This is a worst case quadratic time way of identifying variables
1986 that must have equivalent points-to sets, including those caused by
1987 static cycles, and single entry subgraphs, in the constraint graph.
1988
1989 The technique is described in "Exploiting Pointer and Location
1990 Equivalence to Optimize Pointer Analysis" (14th International
1991 Static Analysis Symposium (SAS), August 2007). It is known as the
1992 "HU" algorithm, and is equivalent to value numbering the collapsed
1993 constraint graph including evaluating unions.
1994
1995 The general method of finding equivalence classes is as follows:
1996 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1997 Initialize all non-REF nodes to be direct nodes.
1998 For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
1999 variable}.
2000 For each constraint containing a dereference, we do the same
2001 thing.
2002
2003 We then compute SCC's in the graph and unify nodes in the same SCC,
2004 including pts sets.
2005
2006 For each non-collapsed node x:
2007 Visit all unvisited explicit incoming edges.
2008 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
2009 where y->x.
2010 Lookup the equivalence class for pts(x).
2011 If we found one, equivalence_class(x) = found class.
2012 Otherwise, equivalence_class(x) = new class, and new_class is
2013 added to the lookup table.
2014
2015 All direct nodes with the same equivalence class can be replaced
2016 with a single representative node.
2017 All unlabeled nodes (label == 0) are not pointers and all edges
2018 involving them can be eliminated.
2019 We perform these optimizations during rewrite_constraints.
2020
2021 In addition to pointer equivalence class finding, we also perform
2022 location equivalence class finding. This is the set of variables
2023 that always appear together in points-to sets. We use this to
2024 compress the size of the points-to sets. */
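
/* Illustrative worked example (not part of the original sources): given
   the constraints a = &x, b = a and c = a, the value numbering above
   computes pts(a) = pts(b) = pts(c) = {x}, so a, b and c receive the
   same pointer-equivalence label and collapse to a single
   representative node before the main solver runs.  */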
2025
2026 /* Current maximum pointer equivalence class id. */
2027 static int pointer_equiv_class;
2028
2029 /* Current maximum location equivalence class id. */
2030 static int location_equiv_class;
2031
2032 /* Recursive routine to find strongly connected components in GRAPH,
2033 and label its nodes with DFS numbers. */
2034
2035 static void
2036 condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
2037 {
2038 unsigned int i;
2039 bitmap_iterator bi;
2040 unsigned int my_dfs;
2041
2042 gcc_checking_assert (si->node_mapping[n] == n);
2043 bitmap_set_bit (si->visited, n);
2044 si->dfs[n] = si->current_index++;
2045 my_dfs = si->dfs[n];
2046
2047 /* Visit all the explicit predecessors. */
2048 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2049 {
2050 unsigned int w = si->node_mapping[i];
2051
2052 if (bitmap_bit_p (si->deleted, w))
2053 continue;
2054
2055 if (!bitmap_bit_p (si->visited, w))
2056 condense_visit (graph, si, w);
2057
2058 unsigned int t = si->node_mapping[w];
2059 gcc_checking_assert (si->node_mapping[n] == n);
2060 if (si->dfs[t] < si->dfs[n])
2061 si->dfs[n] = si->dfs[t];
2062 }
2063
2064 /* Visit all the implicit predecessors. */
2065 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2066 {
2067 unsigned int w = si->node_mapping[i];
2068
2069 if (bitmap_bit_p (si->deleted, w))
2070 continue;
2071
2072 if (!bitmap_bit_p (si->visited, w))
2073 condense_visit (graph, si, w);
2074
2075 unsigned int t = si->node_mapping[w];
2076 gcc_assert (si->node_mapping[n] == n);
2077 if (si->dfs[t] < si->dfs[n])
2078 si->dfs[n] = si->dfs[t];
2079 }
2080
2081 /* See if any components have been identified. */
2082 if (si->dfs[n] == my_dfs)
2083 {
2084 if (si->scc_stack.length () != 0
2085 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2086 {
2087 /* Find the first node of the SCC and do non-bitmap work. */
2088 bool direct_p = true;
2089 unsigned first = si->scc_stack.length ();
2090 do
2091 {
2092 --first;
2093 unsigned int w = si->scc_stack[first];
2094 si->node_mapping[w] = n;
2095 if (!bitmap_bit_p (graph->direct_nodes, w))
2096 direct_p = false;
2097 }
2098 while (first > 0
2099 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
2100 if (!direct_p)
2101 bitmap_clear_bit (graph->direct_nodes, n);
2102
2103 /* Want to reduce to node n, push that first. */
2104 si->scc_stack.reserve (1);
2105 si->scc_stack.quick_push (si->scc_stack[first]);
2106 si->scc_stack[first] = n;
2107
2108 unsigned scc_size = si->scc_stack.length () - first;
2109 unsigned split = scc_size / 2;
2110 unsigned carry = scc_size - split * 2;
2111 while (split > 0)
2112 {
2113 for (unsigned i = 0; i < split; ++i)
2114 {
2115 unsigned a = si->scc_stack[first + i];
2116 unsigned b = si->scc_stack[first + split + carry + i];
2117
2118 /* Unify our nodes. */
2119 if (graph->preds[b])
2120 {
2121 if (!graph->preds[a])
2122 std::swap (graph->preds[a], graph->preds[b]);
2123 else
2124 bitmap_ior_into_and_free (graph->preds[a],
2125 &graph->preds[b]);
2126 }
2127 if (graph->implicit_preds[b])
2128 {
2129 if (!graph->implicit_preds[a])
2130 std::swap (graph->implicit_preds[a],
2131 graph->implicit_preds[b]);
2132 else
2133 bitmap_ior_into_and_free (graph->implicit_preds[a],
2134 &graph->implicit_preds[b]);
2135 }
2136 if (graph->points_to[b])
2137 {
2138 if (!graph->points_to[a])
2139 std::swap (graph->points_to[a], graph->points_to[b]);
2140 else
2141 bitmap_ior_into_and_free (graph->points_to[a],
2142 &graph->points_to[b]);
2143 }
2144 }
2145 unsigned remain = split + carry;
2146 split = remain / 2;
2147 carry = remain - split * 2;
2148 }
2149 /* Actually pop the SCC. */
2150 si->scc_stack.truncate (first);
2151 }
2152 bitmap_set_bit (si->deleted, n);
2153 }
2154 else
2155 si->scc_stack.safe_push (n);
2156 }
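
/* For illustration only (not part of GCC): the balanced pairwise merge
   used above to unite the per-node bitmaps of an SCC.  Halving the
   number of live sets each round avoids repeatedly unioning into one
   ever-growing destination set.  Standard containers, hypothetical
   names; assumes SETS is non-empty.  */

#include <set>
#include <vector>

static std::set<unsigned>
toy_merge_balanced (std::vector<std::set<unsigned> > sets)
{
  size_t n = sets.size ();
  while (n > 1)
    {
      size_t split = n / 2;
      size_t carry = n - split * 2;
      /* Merge the upper half into the lower half; a possible odd
         element in the middle is carried into the next round.  */
      for (size_t i = 0; i < split; ++i)
        sets[i].insert (sets[split + carry + i].begin (),
                        sets[split + carry + i].end ());
      n = split + carry;
    }
  return sets[0];
}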
2157
2158 /* Label pointer equivalences.
2159
2160 This performs a value numbering of the constraint graph to
2161 discover which variables will always have the same points-to sets
2162 under the current set of constraints.
2163
2164 The way it value numbers is to store the set of points-to bits
2165 generated by the constraints and graph edges. This is just used as a
2166 hash and equality comparison. The *actual set of points-to bits* is
2167 completely irrelevant, in that we don't care about being able to
2168 extract them later.
2169
2170 The equality values (currently bitmaps) just have to satisfy a few
2171 constraints, the main ones being:
2172 1. The combining operation must be order independent.
2173 2. The end result of a given set of operations must be unique iff the
2174 combination of input values is unique
2175 3. Hashable. */
2176
2177 static void
2178 label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
2179 {
2180 unsigned int i, first_pred;
2181 bitmap_iterator bi;
2182
2183 bitmap_set_bit (si->visited, n);
2184
2185 /* Label and union the points-to sets of our incoming edges. */
2186 first_pred = -1U;
2187 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2188 {
2189 unsigned int w = si->node_mapping[i];
2190 if (!bitmap_bit_p (si->visited, w))
2191 label_visit (graph, si, w);
2192
2193 /* Skip unused edges. */
2194 if (w == n || graph->pointer_label[w] == 0)
2195 continue;
2196
2197 if (graph->points_to[w])
2198 {
2199 if (!graph->points_to[n])
2200 {
2201 if (first_pred == -1U)
2202 first_pred = w;
2203 else
2204 {
2205 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2206 bitmap_ior (graph->points_to[n],
2207 graph->points_to[first_pred],
2208 graph->points_to[w]);
2209 }
2210 }
2211 else
2212 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2213 }
2214 }
2215
2216 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2217 if (!bitmap_bit_p (graph->direct_nodes, n))
2218 {
2219 if (!graph->points_to[n])
2220 {
2221 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2222 if (first_pred != -1U)
2223 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2224 }
2225 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2226 graph->pointer_label[n] = pointer_equiv_class++;
2227 equiv_class_label_t ecl;
2228 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2229 graph->points_to[n]);
2230 ecl->equivalence_class = graph->pointer_label[n];
2231 return;
2232 }
2233
2234 /* If there was only a single non-empty predecessor the pointer equiv
2235 class is the same. */
2236 if (!graph->points_to[n])
2237 {
2238 if (first_pred != -1U)
2239 {
2240 graph->pointer_label[n] = graph->pointer_label[first_pred];
2241 graph->points_to[n] = graph->points_to[first_pred];
2242 }
2243 return;
2244 }
2245
2246 if (!bitmap_empty_p (graph->points_to[n]))
2247 {
2248 equiv_class_label_t ecl;
2249 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2250 graph->points_to[n]);
2251 if (ecl->equivalence_class == 0)
2252 ecl->equivalence_class = pointer_equiv_class++;
2253 else
2254 {
2255 BITMAP_FREE (graph->points_to[n]);
2256 graph->points_to[n] = ecl->labels;
2257 }
2258 graph->pointer_label[n] = ecl->equivalence_class;
2259 }
2260 }
2261
2262 /* Print the pred graph in dot format. */
2263
2264 static void
2265 dump_pred_graph (class scc_info *si, FILE *file)
2266 {
2267 unsigned int i;
2268
2269 /* Only print the graph if it has already been initialized. */
2270 if (!graph)
2271 return;
2272
2273 /* Print the header of the dot file. */
2274 fprintf (file, "strict digraph {\n");
2275 fprintf (file, " node [\n shape = box\n ]\n");
2276 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2277 fprintf (file, "\n // List of nodes and complex constraints in "
2278 "the constraint graph:\n");
2279
2280 /* The next lines print the nodes in the graph together with the
2281 complex constraints attached to them. */
2282 for (i = 1; i < graph->size; i++)
2283 {
2284 if (i == FIRST_REF_NODE)
2285 continue;
2286 if (si->node_mapping[i] != i)
2287 continue;
2288 if (i < FIRST_REF_NODE)
2289 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2290 else
2291 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2292 if (graph->points_to[i]
2293 && !bitmap_empty_p (graph->points_to[i]))
2294 {
2295 if (i < FIRST_REF_NODE)
2296 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2297 else
2298 fprintf (file, "[label=\"*%s = {",
2299 get_varinfo (i - FIRST_REF_NODE)->name);
2300 unsigned j;
2301 bitmap_iterator bi;
2302 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2303 fprintf (file, " %d", j);
2304 fprintf (file, " }\"]");
2305 }
2306 fprintf (file, ";\n");
2307 }
2308
2309 /* Go over the edges. */
2310 fprintf (file, "\n // Edges in the constraint graph:\n");
2311 for (i = 1; i < graph->size; i++)
2312 {
2313 unsigned j;
2314 bitmap_iterator bi;
2315 if (si->node_mapping[i] != i)
2316 continue;
2317 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2318 {
2319 unsigned from = si->node_mapping[j];
2320 if (from < FIRST_REF_NODE)
2321 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2322 else
2323 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2324 fprintf (file, " -> ");
2325 if (i < FIRST_REF_NODE)
2326 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2327 else
2328 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2329 fprintf (file, ";\n");
2330 }
2331 }
2332
2333 /* Print the tail of the dot file. */
2334 fprintf (file, "}\n");
2335 }
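
/* Illustrative sketch (not actual captured output): for a toy graph
   where node "y" is a predecessor of node "x" and pts(x) = { 2 }, the
   dot emitted by dump_pred_graph has roughly this shape:

     strict digraph {
       node [ shape = box ]
       edge [ fontsize = "12" ]
       "x"[label="x = { 2 }"];
       "y";
       "y" -> "x";
     }
*/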
2336
2337 /* Perform offline variable substitution, discovering equivalence
2338 classes, and eliminating non-pointer variables. */
2339
2340 static class scc_info *
2341 perform_var_substitution (constraint_graph_t graph)
2342 {
2343 unsigned int i;
2344 unsigned int size = graph->size;
2345 scc_info *si = new scc_info (size);
2346
2347 bitmap_obstack_initialize (&iteration_obstack);
2348 gcc_obstack_init (&equiv_class_obstack);
2349 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2350 location_equiv_class_table
2351 = new hash_table<equiv_class_hasher> (511);
2352 pointer_equiv_class = 1;
2353 location_equiv_class = 1;
2354
2355 /* Condense the nodes, which means to find SCC's, count incoming
2356 predecessors, and unite nodes in SCC's. */
2357 for (i = 1; i < FIRST_REF_NODE; i++)
2358 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2359 condense_visit (graph, si, si->node_mapping[i]);
2360
2361 if (dump_file && (dump_flags & TDF_GRAPH))
2362 {
2363 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2364 "in dot format:\n");
2365 dump_pred_graph (si, dump_file);
2366 fprintf (dump_file, "\n\n");
2367 }
2368
2369 bitmap_clear (si->visited);
2370 /* Actually label the nodes for pointer equivalences. */
2371 for (i = 1; i < FIRST_REF_NODE; i++)
2372 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2373 label_visit (graph, si, si->node_mapping[i]);
2374
2375 /* Calculate location equivalence labels. */
2376 for (i = 1; i < FIRST_REF_NODE; i++)
2377 {
2378 bitmap pointed_by;
2379 bitmap_iterator bi;
2380 unsigned int j;
2381
2382 if (!graph->pointed_by[i])
2383 continue;
2384 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2385
2386 /* Translate the pointed-by mapping for pointer equivalence
2387 labels. */
2388 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2389 {
2390 bitmap_set_bit (pointed_by,
2391 graph->pointer_label[si->node_mapping[j]]);
2392 }
2393 /* The original pointed_by is now dead. */
2394 BITMAP_FREE (graph->pointed_by[i]);
2395
2396 /* Look up the location equivalence label if one exists, or make
2397 one otherwise. */
2398 equiv_class_label_t ecl;
2399 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2400 if (ecl->equivalence_class == 0)
2401 ecl->equivalence_class = location_equiv_class++;
2402 else
2403 {
2404 if (dump_file && (dump_flags & TDF_DETAILS))
2405 fprintf (dump_file, "Found location equivalence for node %s\n",
2406 get_varinfo (i)->name);
2407 BITMAP_FREE (pointed_by);
2408 }
2409 graph->loc_label[i] = ecl->equivalence_class;
2410
2411 }
2412
2413 if (dump_file && (dump_flags & TDF_DETAILS))
2414 for (i = 1; i < FIRST_REF_NODE; i++)
2415 {
2416 unsigned j = si->node_mapping[i];
2417 if (j != i)
2418 {
2419 fprintf (dump_file, "%s node id %d ",
2420 bitmap_bit_p (graph->direct_nodes, i)
2421 ? "Direct" : "Indirect", i);
2422 if (i < FIRST_REF_NODE)
2423 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2424 else
2425 fprintf (dump_file, "\"*%s\"",
2426 get_varinfo (i - FIRST_REF_NODE)->name);
2427 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2428 if (j < FIRST_REF_NODE)
2429 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2430 else
2431 fprintf (dump_file, "\"*%s\"\n",
2432 get_varinfo (j - FIRST_REF_NODE)->name);
2433 }
2434 else
2435 {
2436 fprintf (dump_file,
2437 "Equivalence classes for %s node id %d ",
2438 bitmap_bit_p (graph->direct_nodes, i)
2439 ? "direct" : "indirect", i);
2440 if (i < FIRST_REF_NODE)
2441 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2442 else
2443 fprintf (dump_file, "\"*%s\"",
2444 get_varinfo (i - FIRST_REF_NODE)->name);
2445 fprintf (dump_file,
2446 ": pointer %d, location %d\n",
2447 graph->pointer_label[i], graph->loc_label[i]);
2448 }
2449 }
2450
2451 /* Quickly eliminate our non-pointer variables. */
2452
2453 for (i = 1; i < FIRST_REF_NODE; i++)
2454 {
2455 unsigned int node = si->node_mapping[i];
2456
2457 if (graph->pointer_label[node] == 0)
2458 {
2459 if (dump_file && (dump_flags & TDF_DETAILS))
2460 fprintf (dump_file,
2461 "%s is a non-pointer variable, eliminating edges.\n",
2462 get_varinfo (node)->name);
2463 stats.nonpointer_vars++;
2464 clear_edges_for_node (graph, node);
2465 }
2466 }
2467
2468 return si;
2469 }
2470
2471 /* Free information that was only necessary for variable
2472 substitution. */
2473
2474 static void
2475 free_var_substitution_info (class scc_info *si)
2476 {
2477 delete si;
2478 free (graph->pointer_label);
2479 free (graph->loc_label);
2480 free (graph->pointed_by);
2481 free (graph->points_to);
2482 free (graph->eq_rep);
2483 sbitmap_free (graph->direct_nodes);
2484 delete pointer_equiv_class_table;
2485 pointer_equiv_class_table = NULL;
2486 delete location_equiv_class_table;
2487 location_equiv_class_table = NULL;
2488 obstack_free (&equiv_class_obstack, NULL);
2489 bitmap_obstack_release (&iteration_obstack);
2490 }
2491
2492 /* Return an existing node that is equivalent to NODE, which has
2493 equivalence class LABEL, if one exists. Return NODE otherwise. */
2494
2495 static unsigned int
2496 find_equivalent_node (constraint_graph_t graph,
2497 unsigned int node, unsigned int label)
2498 {
2499 /* If the address version of this variable is unused, we can
2500 substitute it for anything else with the same label.
2501 Otherwise, we know the pointers are equivalent, but not the
2502 locations, and we can unite them later. */
2503
2504 if (!bitmap_bit_p (graph->address_taken, node))
2505 {
2506 gcc_checking_assert (label < graph->size);
2507
2508 if (graph->eq_rep[label] != -1)
2509 {
2510 /* Unify the two variables since we know they are equivalent. */
2511 if (unite (graph->eq_rep[label], node))
2512 unify_nodes (graph, graph->eq_rep[label], node, false);
2513 return graph->eq_rep[label];
2514 }
2515 else
2516 {
2517 graph->eq_rep[label] = node;
2518 graph->pe_rep[label] = node;
2519 }
2520 }
2521 else
2522 {
2523 gcc_checking_assert (label < graph->size);
2524 graph->pe[node] = label;
2525 if (graph->pe_rep[label] == -1)
2526 graph->pe_rep[label] = node;
2527 }
2528
2529 return node;
2530 }
2531
2532 /* Unite pointer equivalent but not location equivalent nodes in
2533 GRAPH. This may only be performed once variable substitution is
2534 finished. */
2535
2536 static void
2537 unite_pointer_equivalences (constraint_graph_t graph)
2538 {
2539 unsigned int i;
2540
2541 /* Go through the pointer equivalences and unite them to their
2542 representative, if they aren't already. */
2543 for (i = 1; i < FIRST_REF_NODE; i++)
2544 {
2545 unsigned int label = graph->pe[i];
2546 if (label)
2547 {
2548 int label_rep = graph->pe_rep[label];
2549
2550 if (label_rep == -1)
2551 continue;
2552
2553 label_rep = find (label_rep);
2554 if (label_rep >= 0 && unite (label_rep, find (i)))
2555 unify_nodes (graph, label_rep, i, false);
2556 }
2557 }
2558 }
2559
2560 /* Move complex constraints to the GRAPH nodes they belong to. */
2561
2562 static void
2563 move_complex_constraints (constraint_graph_t graph)
2564 {
2565 int i;
2566 constraint_t c;
2567
2568 FOR_EACH_VEC_ELT (constraints, i, c)
2569 {
2570 if (c)
2571 {
2572 struct constraint_expr lhs = c->lhs;
2573 struct constraint_expr rhs = c->rhs;
2574
2575 if (lhs.type == DEREF)
2576 {
2577 insert_into_complex (graph, lhs.var, c);
2578 }
2579 else if (rhs.type == DEREF)
2580 {
2581 if (!(get_varinfo (lhs.var)->is_special_var))
2582 insert_into_complex (graph, rhs.var, c);
2583 }
2584 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2585 && (lhs.offset != 0 || rhs.offset != 0))
2586 {
2587 insert_into_complex (graph, rhs.var, c);
2588 }
2589 }
2590 }
2591 }
2592
2593
2594 /* Optimize and rewrite complex constraints while performing
2595 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2596 result of perform_var_substitution. */
2597
2598 static void
2599 rewrite_constraints (constraint_graph_t graph,
2600 class scc_info *si)
2601 {
2602 int i;
2603 constraint_t c;
2604
2605 if (flag_checking)
2606 {
2607 for (unsigned int j = 0; j < graph->size; j++)
2608 gcc_assert (find (j) == j);
2609 }
2610
2611 FOR_EACH_VEC_ELT (constraints, i, c)
2612 {
2613 struct constraint_expr lhs = c->lhs;
2614 struct constraint_expr rhs = c->rhs;
2615 unsigned int lhsvar = find (lhs.var);
2616 unsigned int rhsvar = find (rhs.var);
2617 unsigned int lhsnode, rhsnode;
2618 unsigned int lhslabel, rhslabel;
2619
2620 lhsnode = si->node_mapping[lhsvar];
2621 rhsnode = si->node_mapping[rhsvar];
2622 lhslabel = graph->pointer_label[lhsnode];
2623 rhslabel = graph->pointer_label[rhsnode];
2624
2625 /* See if it is really a non-pointer variable, and if so, ignore
2626 the constraint. */
2627 if (lhslabel == 0)
2628 {
2629 if (dump_file && (dump_flags & TDF_DETAILS))
2630 {
2631
2632 fprintf (dump_file, "%s is a non-pointer variable, "
2633 "ignoring constraint:",
2634 get_varinfo (lhs.var)->name);
2635 dump_constraint (dump_file, c);
2636 fprintf (dump_file, "\n");
2637 }
2638 constraints[i] = NULL;
2639 continue;
2640 }
2641
2642 if (rhslabel == 0)
2643 {
2644 if (dump_file && (dump_flags & TDF_DETAILS))
2645 {
2646
2647 fprintf (dump_file, "%s is a non-pointer variable, "
2648 "ignoring constraint:",
2649 get_varinfo (rhs.var)->name);
2650 dump_constraint (dump_file, c);
2651 fprintf (dump_file, "\n");
2652 }
2653 constraints[i] = NULL;
2654 continue;
2655 }
2656
2657 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2658 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2659 c->lhs.var = lhsvar;
2660 c->rhs.var = rhsvar;
2661 }
2662 }
2663
2664 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2665 part of an SCC, false otherwise. */
2666
2667 static bool
2668 eliminate_indirect_cycles (unsigned int node)
2669 {
2670 if (graph->indirect_cycles[node] != -1
2671 && !bitmap_empty_p (get_varinfo (node)->solution))
2672 {
2673 unsigned int i;
2674 auto_vec<unsigned> queue;
2675 int queuepos;
2676 unsigned int to = find (graph->indirect_cycles[node]);
2677 bitmap_iterator bi;
2678
2679 /* We can't touch the solution set and call unify_nodes
2680 at the same time, because unify_nodes is going to do
2681 bitmap unions into it. */
2682
2683 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2684 {
2685 if (find (i) == i && i != to)
2686 {
2687 if (unite (to, i))
2688 queue.safe_push (i);
2689 }
2690 }
2691
2692 for (queuepos = 0;
2693 queue.iterate (queuepos, &i);
2694 queuepos++)
2695 {
2696 unify_nodes (graph, to, i, true);
2697 }
2698 return true;
2699 }
2700 return false;
2701 }
2702
2703 /* Solve the constraint graph GRAPH using our worklist solver.
2704 This is based on the PW* family of solvers from the "Efficient Field
2705 Sensitive Pointer Analysis for C" paper.
2706 It works by iterating over all the graph nodes, processing the complex
2707 constraints and propagating the copy constraints, until everything stops
2708 changing. This corresponds to steps 6-8 in the solving list given above. */
2709
2710 static void
2711 solve_graph (constraint_graph_t graph)
2712 {
2713 unsigned int size = graph->size;
2714 unsigned int i;
2715 bitmap pts;
2716
2717 changed = BITMAP_ALLOC (NULL);
2718
2719 /* Mark all initial non-collapsed nodes as changed. */
2720 for (i = 1; i < size; i++)
2721 {
2722 varinfo_t ivi = get_varinfo (i);
2723 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2724 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2725 || graph->complex[i].length () > 0))
2726 bitmap_set_bit (changed, i);
2727 }
2728
2729 /* Allocate a bitmap to be used to store the changed bits. */
2730 pts = BITMAP_ALLOC (&pta_obstack);
2731
2732 while (!bitmap_empty_p (changed))
2733 {
2734 unsigned int i;
2735 stats.iterations++;
2736
2737 bitmap_obstack_initialize (&iteration_obstack);
2738
2739 auto_vec<unsigned> topo_order = compute_topo_order (graph);
2740 while (topo_order.length () != 0)
2741 {
2742 i = topo_order.pop ();
2743
2744 /* If this variable is not a representative, skip it. */
2745 if (find (i) != i)
2746 continue;
2747
2748 /* In certain indirect cycle cases, we may merge this
2749 variable to another. */
2750 if (eliminate_indirect_cycles (i) && find (i) != i)
2751 continue;
2752
2753 /* If the node has changed, we need to process the
2754 complex constraints and outgoing edges again. */
2755 if (bitmap_clear_bit (changed, i))
2756 {
2757 unsigned int j;
2758 constraint_t c;
2759 bitmap solution;
2760 vec<constraint_t> complex = graph->complex[i];
2761 varinfo_t vi = get_varinfo (i);
2762 bool solution_empty;
2763
2764 /* Compute the changed set of solution bits. If anything
2765 is in the solution just propagate that. */
2766 if (bitmap_bit_p (vi->solution, anything_id))
2767 {
2768 /* If anything is also in the old solution there is
2769 nothing to do.
2770 ??? But we shouldn't have ended up with "changed" set ... */
2771 if (vi->oldsolution
2772 && bitmap_bit_p (vi->oldsolution, anything_id))
2773 continue;
2774 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2775 }
2776 else if (vi->oldsolution)
2777 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2778 else
2779 bitmap_copy (pts, vi->solution);
2780
2781 if (bitmap_empty_p (pts))
2782 continue;
2783
2784 if (vi->oldsolution)
2785 bitmap_ior_into (vi->oldsolution, pts);
2786 else
2787 {
2788 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2789 bitmap_copy (vi->oldsolution, pts);
2790 }
2791
2792 solution = vi->solution;
2793 solution_empty = bitmap_empty_p (solution);
2794
2795 /* Process the complex constraints */
2796 bitmap expanded_pts = NULL;
2797 FOR_EACH_VEC_ELT (complex, j, c)
2798 {
2799 /* XXX: This is going to unsort the constraints in
2800 some cases, which will occasionally add duplicate
2801 constraints during unification. This does not
2802 affect correctness. */
2803 c->lhs.var = find (c->lhs.var);
2804 c->rhs.var = find (c->rhs.var);
2805
2806 /* The only complex constraint that can change our
2807 solution to non-empty, given an empty solution,
2808 is a constraint where the lhs side is receiving
2809 some set from elsewhere. */
2810 if (!solution_empty || c->lhs.type != DEREF)
2811 do_complex_constraint (graph, c, pts, &expanded_pts);
2812 }
2813 BITMAP_FREE (expanded_pts);
2814
2815 solution_empty = bitmap_empty_p (solution);
2816
2817 if (!solution_empty)
2818 {
2819 bitmap_iterator bi;
2820 unsigned eff_escaped_id = find (escaped_id);
2821
2822 /* Propagate solution to all successors. */
2823 unsigned to_remove = ~0U;
2824 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2825 0, j, bi)
2826 {
2827 if (to_remove != ~0U)
2828 {
2829 bitmap_clear_bit (graph->succs[i], to_remove);
2830 to_remove = ~0U;
2831 }
2832 unsigned int to = find (j);
2833 if (to != j)
2834 {
2835 /* Update the succ graph, avoiding duplicate
2836 work. */
2837 to_remove = j;
2838 if (! bitmap_set_bit (graph->succs[i], to))
2839 continue;
2840 /* We eventually end up processing 'to' twice
2841 as it is undefined whether bitmap iteration
2842 iterates over bits set during iteration.
2843 Play safe instead of doing tricks. */
2844 }
2845 /* Don't try to propagate to ourselves. */
2846 if (to == i)
2847 continue;
2848
2849 bitmap tmp = get_varinfo (to)->solution;
2850 bool flag = false;
2851
2852 /* If we propagate from ESCAPED use ESCAPED as
2853 placeholder. */
2854 if (i == eff_escaped_id)
2855 flag = bitmap_set_bit (tmp, escaped_id);
2856 else
2857 flag = bitmap_ior_into (tmp, pts);
2858
2859 if (flag)
2860 bitmap_set_bit (changed, to);
2861 }
2862 if (to_remove != ~0U)
2863 bitmap_clear_bit (graph->succs[i], to_remove);
2864 }
2865 }
2866 }
2867 bitmap_obstack_release (&iteration_obstack);
2868 }
2869
2870 BITMAP_FREE (pts);
2871 BITMAP_FREE (changed);
2872 bitmap_obstack_release (&oldpta_obstack);
2873 }
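
/* For illustration only (not part of GCC): the bare worklist scheme
   that solve_graph implements, on a plain copy-edge graph with std::set
   points-to sets and no complex constraints.  The real solver also
   iterates in topological order and propagates only the delta since
   the last visit.  Hypothetical names.  */

#include <set>
#include <vector>

static void
toy_propagate (const std::vector<std::vector<unsigned> > &succs,
               std::vector<std::set<unsigned> > &sol)
{
  std::vector<bool> changed (sol.size (), true);
  bool any = true;
  while (any)                   /* Iterate until a fixed point.  */
    {
      any = false;
      for (unsigned i = 0; i < sol.size (); ++i)
        {
          if (!changed[i])
            continue;
          changed[i] = false;
          for (unsigned to : succs[i])  /* Sol(to) |= Sol(i).  */
            {
              size_t old_size = sol[to].size ();
              sol[to].insert (sol[i].begin (), sol[i].end ());
              if (sol[to].size () != old_size)
                {
                  changed[to] = true;
                  any = true;
                }
            }
        }
    }
}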
2874
2875 /* Map from trees to variable infos. */
2876 static hash_map<tree, varinfo_t> *vi_for_tree;
2877
2878
2879 /* Insert VI as the variable info for tree T in the vi_for_tree map. */
2880
2881 static void
2882 insert_vi_for_tree (tree t, varinfo_t vi)
2883 {
2884 gcc_assert (vi);
2885 gcc_assert (!vi_for_tree->put (t, vi));
2886 }
2887
2888 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2889 exist in the map, return NULL; otherwise return the varinfo we found. */
2890
2891 static varinfo_t
2892 lookup_vi_for_tree (tree t)
2893 {
2894 varinfo_t *slot = vi_for_tree->get (t);
2895 if (slot == NULL)
2896 return NULL;
2897
2898 return *slot;
2899 }
2900
2901 /* Return a printable name for DECL. */
2902
2903 static const char *
2904 alias_get_name (tree decl)
2905 {
2906 const char *res = "NULL";
2907 if (dump_file)
2908 {
2909 char *temp = NULL;
2910 if (TREE_CODE (decl) == SSA_NAME)
2911 {
2912 res = get_name (decl);
2913 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2914 }
2915 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2916 && DECL_ASSEMBLER_NAME_SET_P (decl))
2917 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2918 else if (DECL_P (decl))
2919 {
2920 res = get_name (decl);
2921 if (!res)
2922 temp = xasprintf ("D.%u", DECL_UID (decl));
2923 }
2924
2925 if (temp)
2926 {
2927 res = ggc_strdup (temp);
2928 free (temp);
2929 }
2930 }
2931
2932 return res;
2933 }
2934
2935 /* Find the variable id for tree T in the map.
2936 If T doesn't exist in the map, create an entry for it and return it. */
2937
2938 static varinfo_t
2939 get_vi_for_tree (tree t)
2940 {
2941 varinfo_t *slot = vi_for_tree->get (t);
2942 if (slot == NULL)
2943 {
2944 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2945 return get_varinfo (id);
2946 }
2947
2948 return *slot;
2949 }
2950
2951 /* Get a scalar constraint expression for a new temporary variable. */
2952
2953 static struct constraint_expr
2954 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2955 {
2956 struct constraint_expr tmp;
2957 varinfo_t vi;
2958
2959 vi = new_var_info (NULL_TREE, name, add_id);
2960 vi->offset = 0;
2961 vi->size = -1;
2962 vi->fullsize = -1;
2963 vi->is_full_var = 1;
2964 vi->is_reg_var = 1;
2965
2966 tmp.var = vi->id;
2967 tmp.type = SCALAR;
2968 tmp.offset = 0;
2969
2970 return tmp;
2971 }
2972
2973 /* Get a constraint expression vector from an SSA_VAR_P node.
2974 If address_p is true, the result will have its address taken. */
2975
2976 static void
2977 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2978 {
2979 struct constraint_expr cexpr;
2980 varinfo_t vi;
2981
2982 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2983 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2984
2985 if (TREE_CODE (t) == SSA_NAME
2986 && SSA_NAME_IS_DEFAULT_DEF (t))
2987 {
2988 /* For parameters, get at the points-to set for the actual parm
2989 decl. */
2990 if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2991 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
2992 {
2993 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2994 return;
2995 }
2996 /* For undefined SSA names return nothing. */
2997 else if (!ssa_defined_default_def_p (t))
2998 {
2999 cexpr.var = nothing_id;
3000 cexpr.type = SCALAR;
3001 cexpr.offset = 0;
3002 results->safe_push (cexpr);
3003 return;
3004 }
3005 }
3006
3007 /* For global variables resort to the alias target. */
3008 if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
3009 {
3010 varpool_node *node = varpool_node::get (t);
3011 if (node && node->alias && node->analyzed)
3012 {
3013 node = node->ultimate_alias_target ();
3014 /* Canonicalize the PT uid of all aliases to the ultimate target.
3015 ??? Hopefully the set of aliases can't change in a way that
3016 changes the ultimate alias target. */
3017 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
3018 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
3019 && (! DECL_PT_UID_SET_P (t)
3020 || DECL_PT_UID (t) == DECL_UID (node->decl)));
3021 DECL_PT_UID (t) = DECL_UID (node->decl);
3022 t = node->decl;
3023 }
3024
3025 /* If this decl may bind to NULL, note that. */
3026 if (address_p
3027 && (! node || ! node->nonzero_address ()))
3028 {
3029 cexpr.var = nothing_id;
3030 cexpr.type = SCALAR;
3031 cexpr.offset = 0;
3032 results->safe_push (cexpr);
3033 }
3034 }
3035
3036 vi = get_vi_for_tree (t);
3037 cexpr.var = vi->id;
3038 cexpr.type = SCALAR;
3039 cexpr.offset = 0;
3040
3041 /* If we are not taking the address of the constraint expr, add all
3042 sub-fields of the variable as well. */
3043 if (!address_p
3044 && !vi->is_full_var)
3045 {
3046 for (; vi; vi = vi_next (vi))
3047 {
3048 cexpr.var = vi->id;
3049 results->safe_push (cexpr);
3050 }
3051 return;
3052 }
3053
3054 results->safe_push (cexpr);
3055 }
3056
3057 /* Process constraint T, performing various simplifications and then
3058 adding it to our list of overall constraints. */
3059
3060 static void
3061 process_constraint (constraint_t t)
3062 {
3063 struct constraint_expr rhs = t->rhs;
3064 struct constraint_expr lhs = t->lhs;
3065
3066 gcc_assert (rhs.var < varmap.length ());
3067 gcc_assert (lhs.var < varmap.length ());
3068
3069 /* If we didn't get any useful constraint from the lhs we get
3070 &ANYTHING as fallback from get_constraint_for. Deal with
3071 it here by turning it into *ANYTHING. */
3072 if (lhs.type == ADDRESSOF
3073 && lhs.var == anything_id)
3074 lhs.type = DEREF;
3075
3076 /* ADDRESSOF on the lhs is invalid. */
3077 gcc_assert (lhs.type != ADDRESSOF);
3078
3079 /* We shouldn't add constraints from things that cannot have pointers.
3080 It's not completely trivial to avoid in the callers, so do it here. */
3081 if (rhs.type != ADDRESSOF
3082 && !get_varinfo (rhs.var)->may_have_pointers)
3083 return;
3084
3085 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3086 if (!get_varinfo (lhs.var)->may_have_pointers)
3087 return;
3088
3089 /* This can happen in our IR with things like n->a = *p */
3090 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3091 {
3092 /* Split into tmp = *rhs, *lhs = tmp */
3093 struct constraint_expr tmplhs;
3094 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3095 process_constraint (new_constraint (tmplhs, rhs));
3096 process_constraint (new_constraint (lhs, tmplhs));
3097 }
3098 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3099 {
3100 /* Split into tmp = &rhs, *lhs = tmp */
3101 struct constraint_expr tmplhs;
3102 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3103 process_constraint (new_constraint (tmplhs, rhs));
3104 process_constraint (new_constraint (lhs, tmplhs));
3105 }
3106 else
3107 {
3108 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3109 if (rhs.type == ADDRESSOF)
3110 get_varinfo (get_varinfo (rhs.var)->head)->address_taken = true;
3111 constraints.safe_push (t);
3112 }
3113 }
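
/* Illustrative worked example (not part of the original sources): for a
   statement like n->a = *p the callers produce a DEREF on both sides,
   which the first branch above splits into

     doubledereftmp = *p
     *(n + offset-of-a) = doubledereftmp

   so that every constraint that is finally stored contains at most one
   dereference.  */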
3114
3115
3116 /* Return the position, in bits, of FDECL from the beginning of its
3117 structure. */
3118
3119 static HOST_WIDE_INT
3120 bitpos_of_field (const tree fdecl)
3121 {
3122 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3123 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3124 return -1;
3125
3126 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3127 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3128 }
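
/* Illustrative worked example (not part of the original sources): for
   struct S { char c; int i; } a typical target places i at
   DECL_FIELD_OFFSET = 4 bytes with DECL_FIELD_BIT_OFFSET = 0, so
   bitpos_of_field returns 4 * 8 + 0 = 32 for an 8-bit-unit target.  */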
3129
3130
3131 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3132 resulting constraint expressions in *RESULTS. */
3133
3134 static void
3135 get_constraint_for_ptr_offset (tree ptr, tree offset,
3136 vec<ce_s> *results)
3137 {
3138 struct constraint_expr c;
3139 unsigned int j, n;
3140 HOST_WIDE_INT rhsoffset;
3141
3142 /* If we do not do field-sensitive PTA, adding offsets to pointers
3143 does not change the points-to solution. */
3144 if (!use_field_sensitive)
3145 {
3146 get_constraint_for_rhs (ptr, results);
3147 return;
3148 }
3149
3150 /* If the offset is not an integer constant that fits
3151 in a HOST_WIDE_INT, we have to fall back to a conservative
3152 solution which includes all sub-fields of all pointed-to
3153 variables of ptr. */
3154 if (offset == NULL_TREE
3155 || TREE_CODE (offset) != INTEGER_CST)
3156 rhsoffset = UNKNOWN_OFFSET;
3157 else
3158 {
3159 /* Sign-extend the offset. */
3160 offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
3161 if (!wi::fits_shwi_p (soffset))
3162 rhsoffset = UNKNOWN_OFFSET;
3163 else
3164 {
3165 /* Make sure the bit-offset also fits. */
3166 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3167 rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
3168 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3169 rhsoffset = UNKNOWN_OFFSET;
3170 }
3171 }
3172
3173 get_constraint_for_rhs (ptr, results);
3174 if (rhsoffset == 0)
3175 return;
3176
3177 /* As we are eventually appending to the solution do not use
3178 vec::iterate here. */
3179 n = results->length ();
3180 for (j = 0; j < n; j++)
3181 {
3182 varinfo_t curr;
3183 c = (*results)[j];
3184 curr = get_varinfo (c.var);
3185
3186 if (c.type == ADDRESSOF
3187 /* If this varinfo represents a full variable just use it. */
3188 && curr->is_full_var)
3189 ;
3190 else if (c.type == ADDRESSOF
3191 /* If we do not know the offset add all subfields. */
3192 && rhsoffset == UNKNOWN_OFFSET)
3193 {
3194 varinfo_t temp = get_varinfo (curr->head);
3195 do
3196 {
3197 struct constraint_expr c2;
3198 c2.var = temp->id;
3199 c2.type = ADDRESSOF;
3200 c2.offset = 0;
3201 if (c2.var != c.var)
3202 results->safe_push (c2);
3203 temp = vi_next (temp);
3204 }
3205 while (temp);
3206 }
3207 else if (c.type == ADDRESSOF)
3208 {
3209 varinfo_t temp;
3210 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3211
3212 /* If curr->offset + rhsoffset is less than zero adjust it. */
3213 if (rhsoffset < 0
3214 && curr->offset < offset)
3215 offset = 0;
3216
3217 /* We have to include all fields that overlap the current
3218 field shifted by rhsoffset. And we include at least
3219 the last or the first field of the variable to represent
3220 reachability of off-bound addresses, in particular &object + 1,
3221 conservatively correct. */
3222 temp = first_or_preceding_vi_for_offset (curr, offset);
3223 c.var = temp->id;
3224 c.offset = 0;
3225 temp = vi_next (temp);
3226 while (temp
3227 && temp->offset < offset + curr->size)
3228 {
3229 struct constraint_expr c2;
3230 c2.var = temp->id;
3231 c2.type = ADDRESSOF;
3232 c2.offset = 0;
3233 results->safe_push (c2);
3234 temp = vi_next (temp);
3235 }
3236 }
3237 else if (c.type == SCALAR)
3238 {
3239 gcc_assert (c.offset == 0);
3240 c.offset = rhsoffset;
3241 }
3242 else
3243 /* We shouldn't get any DEREFs here. */
3244 gcc_unreachable ();
3245
3246 (*results)[j] = c;
3247 }
3248 }
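
/* For illustration only (not part of GCC): the multiply-then-divide-back
   idiom used above to detect overflow when scaling a byte offset to a
   bit offset.  Hypothetical helper using fixed-width types.  */

#include <cstdint>

static bool
toy_byte_to_bit_offset (int64_t byte_off, int64_t *bit_off)
{
  /* Multiply in unsigned arithmetic so wrap-around is well defined.  */
  int64_t bits = (int64_t) ((uint64_t) byte_off * 8);
  if (bits / 8 != byte_off)     /* Dividing back exposes any wrap.  */
    return false;
  *bit_off = bits;
  return true;
}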
3249
3250
3251 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3252 If address_p is true the result will have its address taken.
3253 If lhs_p is true then the constraint expression is assumed to be used
3254 as the lhs. */
3255
3256 static void
3257 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3258 bool address_p, bool lhs_p)
3259 {
3260 tree orig_t = t;
3261 poly_int64 bitsize = -1;
3262 poly_int64 bitmaxsize = -1;
3263 poly_int64 bitpos;
3264 bool reverse;
3265 tree forzero;
3266
3267 /* Some people like to do cute things like take the address of
3268 &0->a.b */
3269 forzero = t;
3270 while (handled_component_p (forzero)
3271 || INDIRECT_REF_P (forzero)
3272 || TREE_CODE (forzero) == MEM_REF)
3273 forzero = TREE_OPERAND (forzero, 0);
3274
3275 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3276 {
3277 struct constraint_expr temp;
3278
3279 temp.offset = 0;
3280 temp.var = integer_id;
3281 temp.type = SCALAR;
3282 results->safe_push (temp);
3283 return;
3284 }
3285
3286 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3287
3288 /* We can end up here for component references on a
3289 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3290 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3291 symbolic constants simply give up. */
3292 if (TREE_CODE (t) == ADDR_EXPR)
3293 {
3294 constraint_expr result;
3295 result.type = SCALAR;
3296 result.var = anything_id;
3297 result.offset = 0;
3298 results->safe_push (result);
3299 return;
3300 }
3301
3302 /* Avoid creating pointer-offset constraints, so handle MEM_REF
3303 offsets directly. Pretend to take the address of the base,
3304 we'll take care of adding the required subset of sub-fields below. */
3305 if (TREE_CODE (t) == MEM_REF
3306 && !integer_zerop (TREE_OPERAND (t, 0)))
3307 {
3308 poly_offset_int off = mem_ref_offset (t);
3309 off <<= LOG2_BITS_PER_UNIT;
3310 off += bitpos;
3311 poly_int64 off_hwi;
3312 if (off.to_shwi (&off_hwi))
3313 bitpos = off_hwi;
3314 else
3315 {
3316 bitpos = 0;
3317 bitmaxsize = -1;
3318 }
3319 get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
3320 do_deref (results);
3321 }
3322 else
3323 get_constraint_for_1 (t, results, true, lhs_p);
3324
3325 /* Strip off nothing_id. */
3326 if (results->length () == 2)
3327 {
3328 gcc_assert ((*results)[0].var == nothing_id);
3329 results->unordered_remove (0);
3330 }
3331 gcc_assert (results->length () == 1);
3332 struct constraint_expr &result = results->last ();
3333
3334 if (result.type == SCALAR
3335 && get_varinfo (result.var)->is_full_var)
3336 /* For single-field vars do not bother about the offset. */
3337 result.offset = 0;
3338 else if (result.type == SCALAR)
3339 {
3340 /* In languages like C, you can access one past the end of an
3341 array. You aren't allowed to dereference it, so we can
3342 ignore this constraint. When we handle pointer subtraction,
3343 we may have to do something cute here. */
3344
3345 if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
3346 && maybe_ne (bitmaxsize, 0))
3347 {
3348 /* It's also not true that the constraint will actually start at the
3349 right offset, it may start in some padding. We only care about
3350 setting the constraint to the first actual field it touches, so
3351 walk to find it. */
3352 struct constraint_expr cexpr = result;
3353 varinfo_t curr;
3354 results->pop ();
3355 cexpr.offset = 0;
3356 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3357 {
3358 if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
3359 curr->size, bitpos, bitmaxsize))
3360 {
3361 cexpr.var = curr->id;
3362 results->safe_push (cexpr);
3363 if (address_p)
3364 break;
3365 }
3366 }
3367 /* If we are going to take the address of this field then
3368 to be able to compute reachability correctly add at least
3369 the last field of the variable. */
3370 if (address_p && results->length () == 0)
3371 {
3372 curr = get_varinfo (cexpr.var);
3373 while (curr->next != 0)
3374 curr = vi_next (curr);
3375 cexpr.var = curr->id;
3376 results->safe_push (cexpr);
3377 }
3378 else if (results->length () == 0)
3379 /* We used to assert that we found *some* field there, on the
3380 theory that the user couldn't be accessing *only* padding. But
3381 the user could access one past the end of an array embedded in
3382 a struct, resulting in accessing *only* padding, or access only
3383 padding via type-punning to a type that has a field just in
3384 padding space. So fall back to anything. */
3385 {
3386 cexpr.type = SCALAR;
3387 cexpr.var = anything_id;
3388 cexpr.offset = 0;
3389 results->safe_push (cexpr);
3390 }
3391 }
3392 else if (known_eq (bitmaxsize, 0))
3393 {
3394 if (dump_file && (dump_flags & TDF_DETAILS))
3395 fprintf (dump_file, "Access to zero-sized part of variable, "
3396 "ignoring\n");
3397 }
3398 else
3399 if (dump_file && (dump_flags & TDF_DETAILS))
3400 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3401 }
3402 else if (result.type == DEREF)
3403 {
3404 /* If we do not know exactly where the access goes say so. Note
3405 that only for non-structure accesses we know that we access
3406 at most one subfield of any variable. */
3407 HOST_WIDE_INT const_bitpos;
3408 if (!bitpos.is_constant (&const_bitpos)
3409 || const_bitpos == -1
3410 || maybe_ne (bitsize, bitmaxsize)
3411 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3412 || result.offset == UNKNOWN_OFFSET)
3413 result.offset = UNKNOWN_OFFSET;
3414 else
3415 result.offset += const_bitpos;
3416 }
3417 else if (result.type == ADDRESSOF)
3418 {
3419 /* We can end up here for component references on constants like
3420 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3421 result.type = SCALAR;
3422 result.var = anything_id;
3423 result.offset = 0;
3424 }
3425 else
3426 gcc_unreachable ();
3427 }
3428
3429
3430 /* Dereference the constraint expression CONS, and return the result.
3431 DEREF (ADDRESSOF) = SCALAR
3432 DEREF (SCALAR) = DEREF
3433 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3434 This is needed so that we can handle dereferencing DEREF constraints. */
3435
3436 static void
3437 do_deref (vec<ce_s> *constraints)
3438 {
3439 struct constraint_expr *c;
3440 unsigned int i = 0;
3441
3442 FOR_EACH_VEC_ELT (*constraints, i, c)
3443 {
3444 if (c->type == SCALAR)
3445 c->type = DEREF;
3446 else if (c->type == ADDRESSOF)
3447 c->type = SCALAR;
3448 else if (c->type == DEREF)
3449 {
3450 struct constraint_expr tmplhs;
3451 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3452 process_constraint (new_constraint (tmplhs, *c));
3453 c->var = tmplhs.var;
3454 }
3455 else
3456 gcc_unreachable ();
3457 }
3458 }
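
/* Illustrative worked example (not part of the original sources):
   applying do_deref to a single-element vector rewrites

     [ADDRESSOF a]  ->  [SCALAR a]       (since *(&a) is a)
     [SCALAR a]     ->  [DEREF a]        (one more level of indirection)
     [DEREF a]      ->  [SCALAR dereftmp]

   where the last case also emits the helper constraint
   dereftmp = *a via process_constraint.  */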
3459
3460 /* Given a tree T, return the constraint expression for taking the
3461 address of it. */
3462
3463 static void
3464 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3465 {
3466 struct constraint_expr *c;
3467 unsigned int i;
3468
3469 get_constraint_for_1 (t, results, true, true);
3470
3471 FOR_EACH_VEC_ELT (*results, i, c)
3472 {
3473 if (c->type == DEREF)
3474 c->type = SCALAR;
3475 else
3476 c->type = ADDRESSOF;
3477 }
3478 }
3479
3480 /* Given a tree T, return the constraint expression for it. */
3481
3482 static void
3483 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3484 bool lhs_p)
3485 {
3486 struct constraint_expr temp;
3487
3488 /* x = integer is all glommed to a single variable, which doesn't
3489 point to anything by itself. That is, of course, unless it is an
3490 integer constant being treated as a pointer, in which case, we
3491 will return that this is really the addressof anything. This
3492 happens below, since it will fall into the default case. The only
3493 case we know something about an integer treated like a pointer is
3494 when it is the NULL pointer, and then we just say it points to
3495 NULL.
3496
3497 Do not do that if -fno-delete-null-pointer-checks though, because
3498 in that case *NULL does not fail, so it _should_ alias *anything.
3499 It is not worth adding a new option or renaming the existing one,
3500 since this case is relatively obscure. */
3501 if ((TREE_CODE (t) == INTEGER_CST
3502 && integer_zerop (t))
3503 /* The only valid CONSTRUCTORs in gimple with pointer typed
3504 elements are zero-initializers. But in IPA mode we also
3505 process global initializers, so verify at least. */
3506 || (TREE_CODE (t) == CONSTRUCTOR
3507 && CONSTRUCTOR_NELTS (t) == 0))
3508 {
3509 if (flag_delete_null_pointer_checks)
3510 temp.var = nothing_id;
3511 else
3512 temp.var = nonlocal_id;
3513 temp.type = ADDRESSOF;
3514 temp.offset = 0;
3515 results->safe_push (temp);
3516 return;
3517 }
3518
3519 /* String constants are read-only, ideally we'd have a CONST_DECL
3520 for those. */
3521 if (TREE_CODE (t) == STRING_CST)
3522 {
3523 temp.var = string_id;
3524 temp.type = SCALAR;
3525 temp.offset = 0;
3526 results->safe_push (temp);
3527 return;
3528 }
3529
3530 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3531 {
3532 case tcc_expression:
3533 {
3534 switch (TREE_CODE (t))
3535 {
3536 case ADDR_EXPR:
3537 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3538 return;
3539 default:;
3540 }
3541 break;
3542 }
3543 case tcc_reference:
3544 {
3545 switch (TREE_CODE (t))
3546 {
3547 case MEM_REF:
3548 {
3549 struct constraint_expr cs;
3550 varinfo_t vi, curr;
3551 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3552 TREE_OPERAND (t, 1), results);
3553 do_deref (results);
3554
3555 /* If we are not taking the address then make sure to process
3556 all subvariables we might access. */
3557 if (address_p)
3558 return;
3559
3560 cs = results->last ();
3561 if (cs.type == DEREF
3562 && type_can_have_subvars (TREE_TYPE (t)))
3563 {
3564 /* For dereferences this means we have to defer it
3565 to solving time. */
3566 results->last ().offset = UNKNOWN_OFFSET;
3567 return;
3568 }
3569 if (cs.type != SCALAR)
3570 return;
3571
3572 vi = get_varinfo (cs.var);
3573 curr = vi_next (vi);
3574 if (!vi->is_full_var
3575 && curr)
3576 {
3577 unsigned HOST_WIDE_INT size;
3578 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3579 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3580 else
3581 size = -1;
3582 for (; curr; curr = vi_next (curr))
3583 {
3584 if (curr->offset - vi->offset < size)
3585 {
3586 cs.var = curr->id;
3587 results->safe_push (cs);
3588 }
3589 else
3590 break;
3591 }
3592 }
3593 return;
3594 }
3595 case ARRAY_REF:
3596 case ARRAY_RANGE_REF:
3597 case COMPONENT_REF:
3598 case IMAGPART_EXPR:
3599 case REALPART_EXPR:
3600 case BIT_FIELD_REF:
3601 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3602 return;
3603 case VIEW_CONVERT_EXPR:
3604 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3605 lhs_p);
3606 return;
3607 /* We are missing handling for TARGET_MEM_REF here. */
3608 default:;
3609 }
3610 break;
3611 }
3612 case tcc_exceptional:
3613 {
3614 switch (TREE_CODE (t))
3615 {
3616 case SSA_NAME:
3617 {
3618 get_constraint_for_ssa_var (t, results, address_p);
3619 return;
3620 }
3621 case CONSTRUCTOR:
3622 {
3623 unsigned int i;
3624 tree val;
3625 auto_vec<ce_s> tmp;
3626 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3627 {
3628 struct constraint_expr *rhsp;
3629 unsigned j;
3630 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3631 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3632 results->safe_push (*rhsp);
3633 tmp.truncate (0);
3634 }
3635 /* We do not know whether the constructor was complete,
3636 so technically we have to add &NOTHING or &ANYTHING
3637 like we do for an empty constructor as well. */
3638 return;
3639 }
3640 default:;
3641 }
3642 break;
3643 }
3644 case tcc_declaration:
3645 {
3646 get_constraint_for_ssa_var (t, results, address_p);
3647 return;
3648 }
3649 case tcc_constant:
3650 {
3651 /* We cannot refer to automatic variables through constants. */
3652 temp.type = ADDRESSOF;
3653 temp.var = nonlocal_id;
3654 temp.offset = 0;
3655 results->safe_push (temp);
3656 return;
3657 }
3658 default:;
3659 }
3660
3661 /* The default fallback is a constraint from anything. */
3662 temp.type = ADDRESSOF;
3663 temp.var = anything_id;
3664 temp.offset = 0;
3665 results->safe_push (temp);
3666 }
3667
3668 /* Given a gimple tree T, return the constraint expression vector for it. */
3669
3670 static void
3671 get_constraint_for (tree t, vec<ce_s> *results)
3672 {
3673 gcc_assert (results->length () == 0);
3674
3675 get_constraint_for_1 (t, results, false, true);
3676 }
3677
3678 /* Given a gimple tree T, return the constraint expression vector for it
3679 to be used as the rhs of a constraint. */
3680
3681 static void
3682 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3683 {
3684 gcc_assert (results->length () == 0);
3685
3686 get_constraint_for_1 (t, results, false, false);
3687 }
3688
3689
3690 /* Efficiently generate constraints from all entries in RHSC to all
3691 entries in LHSC. */
3692
3693 static void
3694 process_all_all_constraints (const vec<ce_s> &lhsc,
3695 const vec<ce_s> &rhsc)
3696 {
3697 struct constraint_expr *lhsp, *rhsp;
3698 unsigned i, j;
3699
3700 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3701 {
3702 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3703 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3704 process_constraint (new_constraint (*lhsp, *rhsp));
3705 }
3706 else
3707 {
3708 struct constraint_expr tmp;
3709 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3710 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3711 process_constraint (new_constraint (tmp, *rhsp));
3712 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3713 process_constraint (new_constraint (*lhsp, tmp));
3714 }
3715 }
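
/* Sketch: for lhsc = {a,b,c} and rhsc = {x,y,z} the naive expansion
   would emit nine copy constraints; routing the copies through the
   "allalltmp" temporary emits six instead:

     allalltmp = x;  allalltmp = y;  allalltmp = z;
     a = allalltmp;  b = allalltmp;  c = allalltmp;

   (illustrative constraint notation only, not actual dump output).  */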
3716
3717 /* Handle aggregate copies by expanding into copies of the respective
3718 fields of the structures. */
3719
3720 static void
3721 do_structure_copy (tree lhsop, tree rhsop)
3722 {
3723 struct constraint_expr *lhsp, *rhsp;
3724 auto_vec<ce_s> lhsc;
3725 auto_vec<ce_s> rhsc;
3726 unsigned j;
3727
3728 get_constraint_for (lhsop, &lhsc);
3729 get_constraint_for_rhs (rhsop, &rhsc);
3730 lhsp = &lhsc[0];
3731 rhsp = &rhsc[0];
3732 if (lhsp->type == DEREF
3733 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3734 || rhsp->type == DEREF)
3735 {
3736 if (lhsp->type == DEREF)
3737 {
3738 gcc_assert (lhsc.length () == 1);
3739 lhsp->offset = UNKNOWN_OFFSET;
3740 }
3741 if (rhsp->type == DEREF)
3742 {
3743 gcc_assert (rhsc.length () == 1);
3744 rhsp->offset = UNKNOWN_OFFSET;
3745 }
3746 process_all_all_constraints (lhsc, rhsc);
3747 }
3748 else if (lhsp->type == SCALAR
3749 && (rhsp->type == SCALAR
3750 || rhsp->type == ADDRESSOF))
3751 {
3752 HOST_WIDE_INT lhssize, lhsoffset;
3753 HOST_WIDE_INT rhssize, rhsoffset;
3754 bool reverse;
3755 unsigned k = 0;
3756 if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
3757 || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
3758 &reverse))
3759 {
3760 process_all_all_constraints (lhsc, rhsc);
3761 return;
3762 }
3763 for (j = 0; lhsc.iterate (j, &lhsp);)
3764 {
3765 varinfo_t lhsv, rhsv;
3766 rhsp = &rhsc[k];
3767 lhsv = get_varinfo (lhsp->var);
3768 rhsv = get_varinfo (rhsp->var);
3769 if (lhsv->may_have_pointers
3770 && (lhsv->is_full_var
3771 || rhsv->is_full_var
3772 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3773 rhsv->offset + lhsoffset, rhsv->size)))
3774 process_constraint (new_constraint (*lhsp, *rhsp));
3775 if (!rhsv->is_full_var
3776 && (lhsv->is_full_var
3777 || (lhsv->offset + rhsoffset + lhsv->size
3778 > rhsv->offset + lhsoffset + rhsv->size)))
3779 {
3780 ++k;
3781 if (k >= rhsc.length ())
3782 break;
3783 }
3784 else
3785 ++j;
3786 }
3787 }
3788 else
3789 gcc_unreachable ();
3790 }
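
/* Illustrative sketch (not actual dump output): for

     struct S { int *p; int *q; } a, b;
     a = b;

   the SCALAR/SCALAR path above walks the subfields of both sides in
   offset order and emits roughly the field-wise copies a.p = b.p and
   a.q = b.q, while a DEREF on either side falls back to an all-to-all
   copy with UNKNOWN_OFFSET via process_all_all_constraints.  */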
3791
3792 /* Create constraints ID = { rhsc }. */
3793
3794 static void
3795 make_constraints_to (unsigned id, const vec<ce_s> &rhsc)
3796 {
3797 struct constraint_expr *c;
3798 struct constraint_expr includes;
3799 unsigned int j;
3800
3801 includes.var = id;
3802 includes.offset = 0;
3803 includes.type = SCALAR;
3804
3805 FOR_EACH_VEC_ELT (rhsc, j, c)
3806 process_constraint (new_constraint (includes, *c));
3807 }
3808
3809 /* Create a constraint ID = OP. */
3810
3811 static void
3812 make_constraint_to (unsigned id, tree op)
3813 {
3814 auto_vec<ce_s> rhsc;
3815 get_constraint_for_rhs (op, &rhsc);
3816 make_constraints_to (id, rhsc);
3817 }
3818
3819 /* Create a constraint ID = &FROM. */
3820
3821 static void
3822 make_constraint_from (varinfo_t vi, int from)
3823 {
3824 struct constraint_expr lhs, rhs;
3825
3826 lhs.var = vi->id;
3827 lhs.offset = 0;
3828 lhs.type = SCALAR;
3829
3830 rhs.var = from;
3831 rhs.offset = 0;
3832 rhs.type = ADDRESSOF;
3833 process_constraint (new_constraint (lhs, rhs));
3834 }
3835
3836 /* Create a constraint ID = FROM. */
3837
3838 static void
3839 make_copy_constraint (varinfo_t vi, int from)
3840 {
3841 struct constraint_expr lhs, rhs;
3842
3843 lhs.var = vi->id;
3844 lhs.offset = 0;
3845 lhs.type = SCALAR;
3846
3847 rhs.var = from;
3848 rhs.offset = 0;
3849 rhs.type = SCALAR;
3850 process_constraint (new_constraint (lhs, rhs));
3851 }
3852
3853 /* Make constraints necessary to make OP escape. */
3854
3855 static void
3856 make_escape_constraint (tree op)
3857 {
3858 make_constraint_to (escaped_id, op);
3859 }
3860
3861 /* Make constraint necessary to make all indirect references
3862 from VI escape. */
3863
3864 static void
3865 make_indirect_escape_constraint (varinfo_t vi)
3866 {
3867 struct constraint_expr lhs, rhs;
3868 /* escaped = *(VAR + UNKNOWN); */
3869 lhs.type = SCALAR;
3870 lhs.var = escaped_id;
3871 lhs.offset = 0;
3872 rhs.type = DEREF;
3873 rhs.var = vi->id;
3874 rhs.offset = UNKNOWN_OFFSET;
3875 process_constraint (new_constraint (lhs, rhs));
3876 }
3877
3878 /* Add constraints so that the solution of VI is transitively closed. */
3879
3880 static void
3881 make_transitive_closure_constraints (varinfo_t vi)
3882 {
3883 struct constraint_expr lhs, rhs;
3884
3885 /* VAR = *(VAR + UNKNOWN); */
3886 lhs.type = SCALAR;
3887 lhs.var = vi->id;
3888 lhs.offset = 0;
3889 rhs.type = DEREF;
3890 rhs.var = vi->id;
3891 rhs.offset = UNKNOWN_OFFSET;
3892 process_constraint (new_constraint (lhs, rhs));
3893 }
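
/* E.g. if VI points to p and *p points to q, iterating the
   VAR = *(VAR + UNKNOWN) constraint during solving also adds q (and
   anything reachable from q) to VI's solution; a sketch of the
   intended fixpoint behavior in the constraint solver.  */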
3894
3895 /* Add constraints so that the solution of VI has all subvariables added. */
3896
3897 static void
3898 make_any_offset_constraints (varinfo_t vi)
3899 {
3900 struct constraint_expr lhs, rhs;
3901
3902 /* VAR = VAR + UNKNOWN; */
3903 lhs.type = SCALAR;
3904 lhs.var = vi->id;
3905 lhs.offset = 0;
3906 rhs.type = SCALAR;
3907 rhs.var = vi->id;
3908 rhs.offset = UNKNOWN_OFFSET;
3909 process_constraint (new_constraint (lhs, rhs));
3910 }
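
/* E.g. if VI's solution contains one subfield of a structure, the
   VAR = VAR + UNKNOWN self-copy also pulls in the other subfields of
   that structure, since an unknown offset may reach any of them
   (sketch).  */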
3911
3912 /* Temporary storage for fake var decls. */
3913 struct obstack fake_var_decl_obstack;
3914
3915 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3916
3917 static tree
3918 build_fake_var_decl (tree type)
3919 {
3920 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3921 memset (decl, 0, sizeof (struct tree_var_decl));
3922 TREE_SET_CODE (decl, VAR_DECL);
3923 TREE_TYPE (decl) = type;
3924 DECL_UID (decl) = allocate_decl_uid ();
3925 SET_DECL_PT_UID (decl, -1);
3926 layout_decl (decl, 0);
3927 return decl;
3928 }
3929
3930 /* Create a new artificial heap variable with NAME.
3931 Return the created variable. */
3932
3933 static varinfo_t
3934 make_heapvar (const char *name, bool add_id)
3935 {
3936 varinfo_t vi;
3937 tree heapvar;
3938
3939 heapvar = build_fake_var_decl (ptr_type_node);
3940 DECL_EXTERNAL (heapvar) = 1;
3941
3942 vi = new_var_info (heapvar, name, add_id);
3943 vi->is_heap_var = true;
3944 vi->is_unknown_size_var = true;
3945 vi->offset = 0;
3946 vi->fullsize = ~0;
3947 vi->size = ~0;
3948 vi->is_full_var = true;
3949 insert_vi_for_tree (heapvar, vi);
3950
3951 return vi;
3952 }
3953
3954 /* Create a new artificial heap variable with NAME and make a
3955 constraint from it to LHS. Set flags according to a tag used
3956 for tracking restrict pointers. */
3957
3958 static varinfo_t
3959 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3960 {
3961 varinfo_t vi = make_heapvar (name, add_id);
3962 vi->is_restrict_var = 1;
3963 vi->is_global_var = 1;
3964 vi->may_have_pointers = 1;
3965 make_constraint_from (lhs, vi->id);
3966 return vi;
3967 }
3968
3969 /* Create a new artificial heap variable with NAME and make a
3970 constraint from it to LHS. Set flags according to a tag used
3971 for tracking restrict pointers and make the artificial heap
3972 point to global memory. */
3973
3974 static varinfo_t
3975 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3976 bool add_id)
3977 {
3978 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3979 make_copy_constraint (vi, nonlocal_id);
3980 return vi;
3981 }
3982
3983 /* In IPA mode there are varinfos for different aspects of each
3984 function designator. One for the points-to set of the return
3985 value, one for the variables that are clobbered by the function,
3986 one for its uses and one for each parameter (including a single
3987 glob for remaining variadic arguments). */
3988
3989 enum { fi_clobbers = 1, fi_uses = 2,
3990 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3991
3992 /* Get a constraint for the requested part of a function designator FI
3993 when operating in IPA mode. */
3994
3995 static struct constraint_expr
3996 get_function_part_constraint (varinfo_t fi, unsigned part)
3997 {
3998 struct constraint_expr c;
3999
4000 gcc_assert (in_ipa_mode);
4001
4002 if (fi->id == anything_id)
4003 {
4004 /* ??? We probably should have an ANYFN special variable. */
4005 c.var = anything_id;
4006 c.offset = 0;
4007 c.type = SCALAR;
4008 }
4009 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
4010 {
4011 varinfo_t ai = first_vi_for_offset (fi, part);
4012 if (ai)
4013 c.var = ai->id;
4014 else
4015 c.var = anything_id;
4016 c.offset = 0;
4017 c.type = SCALAR;
4018 }
4019 else
4020 {
4021 c.var = fi->id;
4022 c.offset = part;
4023 c.type = DEREF;
4024 }
4025
4026 return c;
4027 }
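
/* Illustrative summary: for a known FUNCTION_DECL the constraint for,
   say, the second parameter is the subvariable found at offset
   fi_parm_base + 1 in the function's varinfo chain; for an unknown or
   indirect callee it degrades to a DEREF of the callee varinfo at that
   offset, and for ANYTHING it is simply ANYTHING again.  */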
4028
4029 /* Produce constraints for argument ARG of call STMT with eaf flags
4030 FLAGS. RESULTS is the vector holding constraints for the return value.
4031 CALLESCAPE_ID is the variable where call-local escapes are added.
4032 WRITES_GLOBAL_MEMORY is true if the callee may write global memory. */
4033
4034 static void
4035 handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags,
4036 int callescape_id, bool writes_global_memory)
4037 {
4038 int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ
4039 | EAF_NO_INDIRECT_ESCAPE;
4040 int relevant_flags = relevant_indirect_flags
4041 | EAF_NO_DIRECT_CLOBBER
4042 | EAF_NO_DIRECT_READ
4043 | EAF_NO_DIRECT_ESCAPE;
4044 if (gimple_call_lhs (stmt))
4045 {
4046 relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY;
4047 relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY;
4048
4049 /* If the value is never read from, it cannot be returned indirectly
4050 (except through the escape solution).
4051 For all flags we get these implications right except for
4052 not_returned because we miss return functions in ipa-prop. */
4053
4054 if (flags & EAF_NO_DIRECT_READ)
4055 flags |= EAF_NOT_RETURNED_INDIRECTLY;
4056 }
4057
4058 /* If the argument is not used we can ignore it.
4059 Similarly the argument is invisible to us if it is not clobbered,
4060 does not escape, is not read and cannot be returned. */
4061 if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags))
4062 return;
4063
4064 /* Produce varinfo for direct accesses to ARG. */
4065 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
4066 tem->is_reg_var = true;
4067 make_constraint_to (tem->id, arg);
4068 make_any_offset_constraints (tem);
4069
4070 bool callarg_transitive = false;
4071
4072 /* As a compile-time optimization, if we make no distinction between
4073 direct and indirect accesses, make ARG transitively closed.
4074 This avoids the need to build the indirect ARG and do everything twice. */
4075 if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0)
4076 == ((flags & EAF_NO_DIRECT_CLOBBER) != 0)
4077 && (((flags & EAF_NO_INDIRECT_READ) != 0)
4078 == ((flags & EAF_NO_DIRECT_READ) != 0))
4079 && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0)
4080 == ((flags & EAF_NO_DIRECT_ESCAPE) != 0))
4081 && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0)
4082 == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0)))
4083 {
4084 make_transitive_closure_constraints (tem);
4085 callarg_transitive = true;
4086 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4087 }
4088
4089 /* If necessary, produce varinfo for indirect accesses to ARG. */
4090 varinfo_t indir_tem = NULL;
4091 if (!callarg_transitive
4092 && (flags & relevant_indirect_flags) != relevant_indirect_flags)
4093 {
4094 struct constraint_expr lhs, rhs;
4095 indir_tem = new_var_info (NULL_TREE, "indircallarg", true);
4096 indir_tem->is_reg_var = true;
4097
4098 /* indir_tem = *tem. */
4099 lhs.type = SCALAR;
4100 lhs.var = indir_tem->id;
4101 lhs.offset = 0;
4102
4103 rhs.type = DEREF;
4104 rhs.var = tem->id;
4105 rhs.offset = UNKNOWN_OFFSET;
4106 process_constraint (new_constraint (lhs, rhs));
4107
4108 make_any_offset_constraints (indir_tem);
4109
4110 /* If we do not read indirectly there is no need for transitive closure.
4111 We know there is only one level of indirection. */
4112 if (!(flags & EAF_NO_INDIRECT_READ))
4113 make_transitive_closure_constraints (indir_tem);
4114 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4115 }
4116
4117 if (gimple_call_lhs (stmt))
4118 {
4119 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4120 {
4121 struct constraint_expr cexpr;
4122 cexpr.var = tem->id;
4123 cexpr.type = SCALAR;
4124 cexpr.offset = 0;
4125 results->safe_push (cexpr);
4126 }
4127 if (!callarg_transitive && !(flags & EAF_NOT_RETURNED_INDIRECTLY))
4128 {
4129 struct constraint_expr cexpr;
4130 cexpr.var = indir_tem->id;
4131 cexpr.type = SCALAR;
4132 cexpr.offset = 0;
4133 results->safe_push (cexpr);
4134 }
4135 }
4136
4137 if (!(flags & EAF_NO_DIRECT_READ))
4138 {
4139 varinfo_t uses = get_call_use_vi (stmt);
4140 make_copy_constraint (uses, tem->id);
4141 if (!callarg_transitive && !(flags & EAF_NO_INDIRECT_READ))
4142 make_copy_constraint (uses, indir_tem->id);
4143 }
4144 else
4145 /* To read indirectly we need to read directly. */
4146 gcc_checking_assert (flags & EAF_NO_INDIRECT_READ);
4147
4148 if (!(flags & EAF_NO_DIRECT_CLOBBER))
4149 {
4150 struct constraint_expr lhs, rhs;
4151
4152 /* *arg = callescape. */
4153 lhs.type = DEREF;
4154 lhs.var = tem->id;
4155 lhs.offset = 0;
4156
4157 rhs.type = SCALAR;
4158 rhs.var = callescape_id;
4159 rhs.offset = 0;
4160 process_constraint (new_constraint (lhs, rhs));
4161
4162 /* callclobbered = arg. */
4163 make_copy_constraint (get_call_clobber_vi (stmt), tem->id);
4164 }
4165 if (!callarg_transitive && !(flags & EAF_NO_INDIRECT_CLOBBER))
4166 {
4167 struct constraint_expr lhs, rhs;
4168
4169 /* *indir_arg = callescape. */
4170 lhs.type = DEREF;
4171 lhs.var = indir_tem->id;
4172 lhs.offset = 0;
4173
4174 rhs.type = SCALAR;
4175 rhs.var = callescape_id;
4176 rhs.offset = 0;
4177 process_constraint (new_constraint (lhs, rhs));
4178
4179 /* callclobbered = indir_arg. */
4180 make_copy_constraint (get_call_clobber_vi (stmt), indir_tem->id);
4181 }
4182
4183 if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE)))
4184 {
4185 struct constraint_expr lhs, rhs;
4186
4187 /* callescape = arg; */
4188 lhs.var = callescape_id;
4189 lhs.offset = 0;
4190 lhs.type = SCALAR;
4191
4192 rhs.var = tem->id;
4193 rhs.offset = 0;
4194 rhs.type = SCALAR;
4195 process_constraint (new_constraint (lhs, rhs));
4196
4197 if (writes_global_memory)
4198 make_escape_constraint (arg);
4199 }
4200 else if (!callarg_transitive && !(flags & EAF_NO_INDIRECT_ESCAPE))
4201 {
4202 struct constraint_expr lhs, rhs;
4203
4204 /* callescape = indir_arg, i.e. callescape = *(arg + UNKNOWN); */
4205 lhs.var = callescape_id;
4206 lhs.offset = 0;
4207 lhs.type = SCALAR;
4208
4209 rhs.var = indir_tem->id;
4210 rhs.offset = 0;
4211 rhs.type = SCALAR;
4212 process_constraint (new_constraint (lhs, rhs));
4213
4214 if (writes_global_memory)
4215 make_indirect_escape_constraint (tem);
4216 }
4217 }
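
/* Hypothetical example (flag values assumed for illustration): for
   q = foo (p)  where modref proved EAF_NO_DIRECT_CLOBBER and
   EAF_NO_INDIRECT_CLOBBER but allows reads, escapes and a direct
   return, the code above emits roughly

     callarg = p;  callarg = callarg + UNKNOWN;
     callarg = *(callarg + UNKNOWN);   (transitively closed)
     uses = callarg;  callescape = callarg;
     q = callarg;                      (may be returned directly)

   plus ESCAPED = p if the callee may write global memory, and omits
   the *callarg = callescape and callclobbered = callarg constraints,
   since clobbering is ruled out (sketch, not dump output).  */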
4218
4219 /* Determine global memory access of call STMT and update
4220 WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY. */
4221
4222 static void
4223 determine_global_memory_access (gcall *stmt,
4224 bool *writes_global_memory,
4225 bool *reads_global_memory,
4226 bool *uses_global_memory)
4227 {
4228 tree callee;
4229 cgraph_node *node;
4230 modref_summary *summary;
4231
4232 /* We need to determine reads to set uses. */
4233 gcc_assert (!uses_global_memory || reads_global_memory);
4234
4235 if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
4236 && (node = cgraph_node::get (callee)) != NULL
4237 && (summary = get_modref_function_summary (node)))
4238 {
4239 if (writes_global_memory && *writes_global_memory)
4240 *writes_global_memory = summary->global_memory_written;
4241 if (reads_global_memory && *reads_global_memory)
4242 *reads_global_memory = summary->global_memory_read;
4243 if (reads_global_memory && uses_global_memory
4244 && !summary->calls_interposable
4245 && !*reads_global_memory && node->binds_to_current_def_p ())
4246 *uses_global_memory = false;
4247 }
4248 if ((writes_global_memory && *writes_global_memory)
4249 || (uses_global_memory && *uses_global_memory)
4250 || (reads_global_memory && *reads_global_memory))
4251 {
4252 attr_fnspec fnspec = gimple_call_fnspec (stmt);
4253 if (fnspec.known_p ())
4254 {
4255 if (writes_global_memory
4256 && !fnspec.global_memory_written_p ())
4257 *writes_global_memory = false;
4258 if (reads_global_memory && !fnspec.global_memory_read_p ())
4259 {
4260 *reads_global_memory = false;
4261 if (uses_global_memory)
4262 *uses_global_memory = false;
4263 }
4264 }
4265 }
4266 }
4267
4268 /* For non-IPA mode, generate constraints necessary for a call on the
4269 RHS and collect the return value constraints into RESULTS to be used
4270 later in handle_lhs_call.
4271
4272 IMPLICIT_EAF_FLAGS are added to each function argument. If
4273 WRITES_GLOBAL_MEMORY is true, the function is assumed to possibly
4274 write to global memory. Similarly for READS_GLOBAL_MEMORY. */
4275
4276 static void
4277 handle_rhs_call (gcall *stmt, vec<ce_s> *results,
4278 int implicit_eaf_flags,
4279 bool writes_global_memory,
4280 bool reads_global_memory)
4281 {
4282 determine_global_memory_access (stmt, &writes_global_memory,
4283 &reads_global_memory,
4284 NULL);
4285
4286 varinfo_t callescape = new_var_info (NULL_TREE, "callescape", true);
4287
4288 /* If the function can use global memory, add it to callescape
4289 and to possible return values. If not, we can still use/return
4290 addresses of global symbols. */
4291 struct constraint_expr lhs, rhs;
4292
4293 lhs.type = SCALAR;
4294 lhs.var = callescape->id;
4295 lhs.offset = 0;
4296
4297 rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
4298 rhs.var = nonlocal_id;
4299 rhs.offset = 0;
4300
4301 process_constraint (new_constraint (lhs, rhs));
4302 results->safe_push (rhs);
4303
4304 varinfo_t uses = get_call_use_vi (stmt);
4305 make_copy_constraint (uses, callescape->id);
4306
4307 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
4308 {
4309 tree arg = gimple_call_arg (stmt, i);
4310 int flags = gimple_call_arg_flags (stmt, i);
4311 handle_call_arg (stmt, arg, results,
4312 flags | implicit_eaf_flags,
4313 callescape->id, writes_global_memory);
4314 }
4315
4316 /* The static chain escapes as well. */
4317 if (gimple_call_chain (stmt))
4318 handle_call_arg (stmt, gimple_call_chain (stmt), results,
4319 implicit_eaf_flags
4320 | gimple_call_static_chain_flags (stmt),
4321 callescape->id, writes_global_memory);
4322
4323 /* And if we applied NRV the address of the return slot escapes as well. */
4324 if (gimple_call_return_slot_opt_p (stmt)
4325 && gimple_call_lhs (stmt) != NULL_TREE
4326 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4327 {
4328 int flags = gimple_call_retslot_flags (stmt);
4329 const int relevant_flags = EAF_NO_DIRECT_ESCAPE
4330 | EAF_NOT_RETURNED_DIRECTLY;
4331
4332 if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags)
4333 {
4334 auto_vec<ce_s> tmpc;
4335
4336 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4337
4338 if (!(flags & EAF_NO_DIRECT_ESCAPE))
4339 {
4340 make_constraints_to (callescape->id, tmpc);
4341 if (writes_global_memory)
4342 make_constraints_to (escaped_id, tmpc);
4343 }
4344 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4345 {
4346 struct constraint_expr *c;
4347 unsigned i;
4348 FOR_EACH_VEC_ELT (tmpc, i, c)
4349 results->safe_push (*c);
4350 }
4351 }
4352 }
4353 }
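
/* Sketch of the fully general case: for a call  foo (p)  to an
   unknown function that may read and write global memory, this emits
   approximately

     callescape = NONLOCAL;  uses = callescape;
     callarg = p;  *callarg = callescape;  ESCAPED = p;

   so everything reachable from p escapes, may be clobbered, and the
   return value may point to NONLOCAL (illustrative only).  */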
4354
4355 /* For non-IPA mode, generate constraints necessary for a call
4356 that returns a pointer and assigns it to LHS. This simply makes
4357 the LHS point to global and escaped variables. */
4358
4359 static void
4360 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
4361 tree fndecl)
4362 {
4363 auto_vec<ce_s> lhsc;
4364
4365 get_constraint_for (lhs, &lhsc);
4366 /* If the store is to a global decl make sure to
4367 add proper escape constraints. */
4368 lhs = get_base_address (lhs);
4369 if (lhs
4370 && DECL_P (lhs)
4371 && is_global_var (lhs))
4372 {
4373 struct constraint_expr tmpc;
4374 tmpc.var = escaped_id;
4375 tmpc.offset = 0;
4376 tmpc.type = SCALAR;
4377 lhsc.safe_push (tmpc);
4378 }
4379
4380 /* If the call returns an argument unmodified, override the rhs
4381 constraints. */
4382 if (flags & ERF_RETURNS_ARG
4383 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4384 {
4385 tree arg;
4386 rhsc.create (0);
4387 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4388 get_constraint_for (arg, &rhsc);
4389 process_all_all_constraints (lhsc, rhsc);
4390 rhsc.release ();
4391 }
4392 else if (flags & ERF_NOALIAS)
4393 {
4394 varinfo_t vi;
4395 struct constraint_expr tmpc;
4396 rhsc.create (0);
4397 vi = make_heapvar ("HEAP", true);
4398 /* We are marking allocated storage local; we deal with it becoming
4399 global by it escaping and the setting of vars_contains_escaped_heap. */
4400 DECL_EXTERNAL (vi->decl) = 0;
4401 vi->is_global_var = 0;
4402 /* If this is not a real malloc call assume the memory was
4403 initialized and thus may point to global memory. All
4404 builtin functions with the malloc attribute behave in a sane way. */
4405 if (!fndecl
4406 || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
4407 make_constraint_from (vi, nonlocal_id);
4408 tmpc.var = vi->id;
4409 tmpc.offset = 0;
4410 tmpc.type = ADDRESSOF;
4411 rhsc.safe_push (tmpc);
4412 process_all_all_constraints (lhsc, rhsc);
4413 rhsc.release ();
4414 }
4415 else
4416 process_all_all_constraints (lhsc, rhsc);
4417 }
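
/* Sketch: for  p = malloc (n)  the ERF_NOALIAS path above creates a
   fresh heap variable HEAP and emits  p = &HEAP,  so p initially
   points to storage disjoint from everything else; for a non-builtin
   allocator the extra HEAP = &NONLOCAL constraint accounts for the
   memory possibly being initialized with global pointers.  */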
4418
4419
4420 /* Return the varinfo for the callee of CALL. */
4421
4422 static varinfo_t
4423 get_fi_for_callee (gcall *call)
4424 {
4425 tree decl, fn = gimple_call_fn (call);
4426
4427 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4428 fn = OBJ_TYPE_REF_EXPR (fn);
4429
4430 /* If we can directly resolve the function being called, do so.
4431 Otherwise, it must be some sort of indirect expression that
4432 we should still be able to handle. */
4433 decl = gimple_call_addr_fndecl (fn);
4434 if (decl)
4435 return get_vi_for_tree (decl);
4436
4437 /* If the function is anything other than an SSA name pointer we have no
4438 clue and should be getting ANYFN (well, ANYTHING for now). */
4439 if (!fn || TREE_CODE (fn) != SSA_NAME)
4440 return get_varinfo (anything_id);
4441
4442 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4443 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4444 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4445 fn = SSA_NAME_VAR (fn);
4446
4447 return get_vi_for_tree (fn);
4448 }
4449
4450 /* Create constraints for assigning call argument ARG to the incoming parameter
4451 INDEX of function FI. */
4452
4453 static void
4454 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4455 {
4456 struct constraint_expr lhs;
4457 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4458
4459 auto_vec<ce_s, 2> rhsc;
4460 get_constraint_for_rhs (arg, &rhsc);
4461
4462 unsigned j;
4463 struct constraint_expr *rhsp;
4464 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4465 process_constraint (new_constraint (lhs, *rhsp));
4466 }
4467
4468 /* Return true if FNDECL may be part of another LTO partition. */
4469
4470 static bool
4471 fndecl_maybe_in_other_partition (tree fndecl)
4472 {
4473 cgraph_node *fn_node = cgraph_node::get (fndecl);
4474 if (fn_node == NULL)
4475 return true;
4476
4477 return fn_node->in_other_partition;
4478 }
4479
4480 /* Create constraints for the builtin call T. Return true if the call
4481 was handled, otherwise false. */
4482
4483 static bool
4484 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4485 {
4486 tree fndecl = gimple_call_fndecl (t);
4487 auto_vec<ce_s, 2> lhsc;
4488 auto_vec<ce_s, 4> rhsc;
4489 varinfo_t fi;
4490
4491 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4492 /* ??? All builtins that are handled here need to be handled
4493 in the alias-oracle query functions explicitly! */
4494 switch (DECL_FUNCTION_CODE (fndecl))
4495 {
4496 /* All the following functions return a pointer to the same object
4497 as their first argument points to. The functions do not add
4498 to the ESCAPED solution. The functions make the memory pointed
4499 to by the first argument point to what the memory pointed to by
4500 the second argument points to. */
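/* Sketch: for  res = memcpy (dest, src, n)  this amounts to the
   constraints  res = dest  and  *dest = *src  in the notation used
   throughout this file (what dest's pointed-to memory may point to
   includes what src's pointed-to memory may point to).  */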
4501 case BUILT_IN_STRCPY:
4502 case BUILT_IN_STRNCPY:
4503 case BUILT_IN_BCOPY:
4504 case BUILT_IN_MEMCPY:
4505 case BUILT_IN_MEMMOVE:
4506 case BUILT_IN_MEMPCPY:
4507 case BUILT_IN_STPCPY:
4508 case BUILT_IN_STPNCPY:
4509 case BUILT_IN_STRCAT:
4510 case BUILT_IN_STRNCAT:
4511 case BUILT_IN_STRCPY_CHK:
4512 case BUILT_IN_STRNCPY_CHK:
4513 case BUILT_IN_MEMCPY_CHK:
4514 case BUILT_IN_MEMMOVE_CHK:
4515 case BUILT_IN_MEMPCPY_CHK:
4516 case BUILT_IN_STPCPY_CHK:
4517 case BUILT_IN_STPNCPY_CHK:
4518 case BUILT_IN_STRCAT_CHK:
4519 case BUILT_IN_STRNCAT_CHK:
4520 case BUILT_IN_TM_MEMCPY:
4521 case BUILT_IN_TM_MEMMOVE:
4522 {
4523 tree res = gimple_call_lhs (t);
4524 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4525 == BUILT_IN_BCOPY ? 1 : 0));
4526 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4527 == BUILT_IN_BCOPY ? 0 : 1));
4528 if (res != NULL_TREE)
4529 {
4530 get_constraint_for (res, &lhsc);
4531 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4532 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4533 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4534 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4535 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4536 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4537 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4538 else
4539 get_constraint_for (dest, &rhsc);
4540 process_all_all_constraints (lhsc, rhsc);
4541 lhsc.truncate (0);
4542 rhsc.truncate (0);
4543 }
4544 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4545 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4546 do_deref (&lhsc);
4547 do_deref (&rhsc);
4548 process_all_all_constraints (lhsc, rhsc);
4549 return true;
4550 }
4551 case BUILT_IN_MEMSET:
4552 case BUILT_IN_MEMSET_CHK:
4553 case BUILT_IN_TM_MEMSET:
4554 {
4555 tree res = gimple_call_lhs (t);
4556 tree dest = gimple_call_arg (t, 0);
4557 unsigned i;
4558 ce_s *lhsp;
4559 struct constraint_expr ac;
4560 if (res != NULL_TREE)
4561 {
4562 get_constraint_for (res, &lhsc);
4563 get_constraint_for (dest, &rhsc);
4564 process_all_all_constraints (lhsc, rhsc);
4565 lhsc.truncate (0);
4566 }
4567 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4568 do_deref (&lhsc);
4569 if (flag_delete_null_pointer_checks
4570 && integer_zerop (gimple_call_arg (t, 1)))
4571 {
4572 ac.type = ADDRESSOF;
4573 ac.var = nothing_id;
4574 }
4575 else
4576 {
4577 ac.type = SCALAR;
4578 ac.var = integer_id;
4579 }
4580 ac.offset = 0;
4581 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4582 process_constraint (new_constraint (*lhsp, ac));
4583 return true;
4584 }
4585 case BUILT_IN_STACK_SAVE:
4586 case BUILT_IN_STACK_RESTORE:
4587 /* Nothing interesting happens. */
4588 return true;
4589 case BUILT_IN_ALLOCA:
4590 case BUILT_IN_ALLOCA_WITH_ALIGN:
4591 case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
4592 {
4593 tree ptr = gimple_call_lhs (t);
4594 if (ptr == NULL_TREE)
4595 return true;
4596 get_constraint_for (ptr, &lhsc);
4597 varinfo_t vi = make_heapvar ("HEAP", true);
4598 /* Alloca storage is never global. To exempt it from escaped
4599 handling make it a non-heap var. */
4600 DECL_EXTERNAL (vi->decl) = 0;
4601 vi->is_global_var = 0;
4602 vi->is_heap_var = 0;
4603 struct constraint_expr tmpc;
4604 tmpc.var = vi->id;
4605 tmpc.offset = 0;
4606 tmpc.type = ADDRESSOF;
4607 rhsc.safe_push (tmpc);
4608 process_all_all_constraints (lhsc, rhsc);
4609 return true;
4610 }
4611 case BUILT_IN_POSIX_MEMALIGN:
4612 {
4613 tree ptrptr = gimple_call_arg (t, 0);
4614 get_constraint_for (ptrptr, &lhsc);
4615 do_deref (&lhsc);
4616 varinfo_t vi = make_heapvar ("HEAP", true);
4617 /* We are marking allocated storage local; we deal with it becoming
4618 global by it escaping and the setting of vars_contains_escaped_heap. */
4619 DECL_EXTERNAL (vi->decl) = 0;
4620 vi->is_global_var = 0;
4621 struct constraint_expr tmpc;
4622 tmpc.var = vi->id;
4623 tmpc.offset = 0;
4624 tmpc.type = ADDRESSOF;
4625 rhsc.safe_push (tmpc);
4626 process_all_all_constraints (lhsc, rhsc);
4627 return true;
4628 }
4629 case BUILT_IN_ASSUME_ALIGNED:
4630 {
4631 tree res = gimple_call_lhs (t);
4632 tree dest = gimple_call_arg (t, 0);
4633 if (res != NULL_TREE)
4634 {
4635 get_constraint_for (res, &lhsc);
4636 get_constraint_for (dest, &rhsc);
4637 process_all_all_constraints (lhsc, rhsc);
4638 }
4639 return true;
4640 }
4641 /* All the following functions do not return pointers, do not
4642 modify the points-to sets of memory reachable from their
4643 arguments and do not add to the ESCAPED solution. */
4644 case BUILT_IN_SINCOS:
4645 case BUILT_IN_SINCOSF:
4646 case BUILT_IN_SINCOSL:
4647 case BUILT_IN_FREXP:
4648 case BUILT_IN_FREXPF:
4649 case BUILT_IN_FREXPL:
4650 case BUILT_IN_GAMMA_R:
4651 case BUILT_IN_GAMMAF_R:
4652 case BUILT_IN_GAMMAL_R:
4653 case BUILT_IN_LGAMMA_R:
4654 case BUILT_IN_LGAMMAF_R:
4655 case BUILT_IN_LGAMMAL_R:
4656 case BUILT_IN_MODF:
4657 case BUILT_IN_MODFF:
4658 case BUILT_IN_MODFL:
4659 case BUILT_IN_REMQUO:
4660 case BUILT_IN_REMQUOF:
4661 case BUILT_IN_REMQUOL:
4662 case BUILT_IN_FREE:
4663 return true;
4664 case BUILT_IN_STRDUP:
4665 case BUILT_IN_STRNDUP:
4666 case BUILT_IN_REALLOC:
4667 if (gimple_call_lhs (t))
4668 {
4669 auto_vec<ce_s> rhsc;
4670 handle_lhs_call (t, gimple_call_lhs (t),
4671 gimple_call_return_flags (t) | ERF_NOALIAS,
4672 rhsc, fndecl);
4673 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4674 NULL_TREE, &lhsc);
4675 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4676 NULL_TREE, &rhsc);
4677 do_deref (&lhsc);
4678 do_deref (&rhsc);
4679 process_all_all_constraints (lhsc, rhsc);
4680 lhsc.truncate (0);
4681 rhsc.truncate (0);
4682 /* For realloc the resulting pointer can be equal to the
4683 argument as well. But only doing this wouldn't be
4684 correct because with ptr == 0 realloc behaves like malloc. */
4685 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4686 {
4687 get_constraint_for (gimple_call_lhs (t), &lhsc);
4688 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4689 process_all_all_constraints (lhsc, rhsc);
4690 }
4691 return true;
4692 }
4693 break;
4694 /* String / character search functions return a pointer into the
4695 source string or NULL. */
4696 case BUILT_IN_INDEX:
4697 case BUILT_IN_STRCHR:
4698 case BUILT_IN_STRRCHR:
4699 case BUILT_IN_MEMCHR:
4700 case BUILT_IN_STRSTR:
4701 case BUILT_IN_STRPBRK:
4702 if (gimple_call_lhs (t))
4703 {
4704 tree src = gimple_call_arg (t, 0);
4705 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4706 constraint_expr nul;
4707 nul.var = nothing_id;
4708 nul.offset = 0;
4709 nul.type = ADDRESSOF;
4710 rhsc.safe_push (nul);
4711 get_constraint_for (gimple_call_lhs (t), &lhsc);
4712 process_all_all_constraints (lhsc, rhsc);
4713 }
4714 return true;
4715 /* Pure functions that return something not based on any object and
4716 that use the memory pointed to by their arguments (but not
4717 transitively). */
4718 case BUILT_IN_STRCMP:
4719 case BUILT_IN_STRCMP_EQ:
4720 case BUILT_IN_STRNCMP:
4721 case BUILT_IN_STRNCMP_EQ:
4722 case BUILT_IN_STRCASECMP:
4723 case BUILT_IN_STRNCASECMP:
4724 case BUILT_IN_MEMCMP:
4725 case BUILT_IN_BCMP:
4726 case BUILT_IN_STRSPN:
4727 case BUILT_IN_STRCSPN:
4728 {
4729 varinfo_t uses = get_call_use_vi (t);
4730 make_any_offset_constraints (uses);
4731 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4732 make_constraint_to (uses->id, gimple_call_arg (t, 1));
4733 /* No constraints are necessary for the return value. */
4734 return true;
4735 }
4736 case BUILT_IN_STRLEN:
4737 {
4738 varinfo_t uses = get_call_use_vi (t);
4739 make_any_offset_constraints (uses);
4740 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4741 /* No constraints are necessary for the return value. */
4742 return true;
4743 }
4744 case BUILT_IN_OBJECT_SIZE:
4745 case BUILT_IN_CONSTANT_P:
4746 {
4747 /* No constraints are necessary for the return value or the
4748 arguments. */
4749 return true;
4750 }
4751 /* Trampolines are special - they set up passing the static
4752 frame. */
4753 case BUILT_IN_INIT_TRAMPOLINE:
4754 {
4755 tree tramp = gimple_call_arg (t, 0);
4756 tree nfunc = gimple_call_arg (t, 1);
4757 tree frame = gimple_call_arg (t, 2);
4758 unsigned i;
4759 struct constraint_expr lhs, *rhsp;
4760 if (in_ipa_mode)
4761 {
4762 varinfo_t nfi = NULL;
4763 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4764 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4765 if (nfi)
4766 {
4767 lhs = get_function_part_constraint (nfi, fi_static_chain);
4768 get_constraint_for (frame, &rhsc);
4769 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4770 process_constraint (new_constraint (lhs, *rhsp));
4771 rhsc.truncate (0);
4772
4773 /* Make the frame point to the function for
4774 the trampoline adjustment call. */
4775 get_constraint_for (tramp, &lhsc);
4776 do_deref (&lhsc);
4777 get_constraint_for (nfunc, &rhsc);
4778 process_all_all_constraints (lhsc, rhsc);
4779
4780 return true;
4781 }
4782 }
4783 /* Else fallthru to generic handling which will let
4784 the frame escape. */
4785 break;
4786 }
4787 case BUILT_IN_ADJUST_TRAMPOLINE:
4788 {
4789 tree tramp = gimple_call_arg (t, 0);
4790 tree res = gimple_call_lhs (t);
4791 if (in_ipa_mode && res)
4792 {
4793 get_constraint_for (res, &lhsc);
4794 get_constraint_for (tramp, &rhsc);
4795 do_deref (&rhsc);
4796 process_all_all_constraints (lhsc, rhsc);
4797 }
4798 return true;
4799 }
4800 CASE_BUILT_IN_TM_STORE (1):
4801 CASE_BUILT_IN_TM_STORE (2):
4802 CASE_BUILT_IN_TM_STORE (4):
4803 CASE_BUILT_IN_TM_STORE (8):
4804 CASE_BUILT_IN_TM_STORE (FLOAT):
4805 CASE_BUILT_IN_TM_STORE (DOUBLE):
4806 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4807 CASE_BUILT_IN_TM_STORE (M64):
4808 CASE_BUILT_IN_TM_STORE (M128):
4809 CASE_BUILT_IN_TM_STORE (M256):
4810 {
4811 tree addr = gimple_call_arg (t, 0);
4812 tree src = gimple_call_arg (t, 1);
4813
4814 get_constraint_for (addr, &lhsc);
4815 do_deref (&lhsc);
4816 get_constraint_for (src, &rhsc);
4817 process_all_all_constraints (lhsc, rhsc);
4818 return true;
4819 }
4820 CASE_BUILT_IN_TM_LOAD (1):
4821 CASE_BUILT_IN_TM_LOAD (2):
4822 CASE_BUILT_IN_TM_LOAD (4):
4823 CASE_BUILT_IN_TM_LOAD (8):
4824 CASE_BUILT_IN_TM_LOAD (FLOAT):
4825 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4826 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4827 CASE_BUILT_IN_TM_LOAD (M64):
4828 CASE_BUILT_IN_TM_LOAD (M128):
4829 CASE_BUILT_IN_TM_LOAD (M256):
4830 {
4831 tree dest = gimple_call_lhs (t);
4832 tree addr = gimple_call_arg (t, 0);
4833
4834 get_constraint_for (dest, &lhsc);
4835 get_constraint_for (addr, &rhsc);
4836 do_deref (&rhsc);
4837 process_all_all_constraints (lhsc, rhsc);
4838 return true;
4839 }
4840 /* Variadic arguments need to be handled in IPA
4841 mode as well. */
4842 case BUILT_IN_VA_START:
4843 {
4844 tree valist = gimple_call_arg (t, 0);
4845 struct constraint_expr rhs, *lhsp;
4846 unsigned i;
4847 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4848 do_deref (&lhsc);
4849 /* The va_list gets access to pointers in variadic
4850 arguments, which we know in the case of IPA analysis;
4851 otherwise they are just all nonlocal variables. */
4852 if (in_ipa_mode)
4853 {
4854 fi = lookup_vi_for_tree (fn->decl);
4855 rhs = get_function_part_constraint (fi, ~0);
4856 rhs.type = ADDRESSOF;
4857 }
4858 else
4859 {
4860 rhs.var = nonlocal_id;
4861 rhs.type = ADDRESSOF;
4862 rhs.offset = 0;
4863 }
4864 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4865 process_constraint (new_constraint (*lhsp, rhs));
4866 /* va_list is clobbered. */
4867 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4868 return true;
4869 }
4870 /* va_end doesn't have any effect that matters. */
4871 case BUILT_IN_VA_END:
4872 return true;
4873 /* Alternate return. Simply give up for now. */
4874 case BUILT_IN_RETURN:
4875 {
4876 fi = NULL;
4877 if (!in_ipa_mode
4878 || !(fi = get_vi_for_tree (fn->decl)))
4879 make_constraint_from (get_varinfo (escaped_id), anything_id);
4880 else if (in_ipa_mode
4881 && fi != NULL)
4882 {
4883 struct constraint_expr lhs, rhs;
4884 lhs = get_function_part_constraint (fi, fi_result);
4885 rhs.var = anything_id;
4886 rhs.offset = 0;
4887 rhs.type = SCALAR;
4888 process_constraint (new_constraint (lhs, rhs));
4889 }
4890 return true;
4891 }
4892 case BUILT_IN_GOMP_PARALLEL:
4893 case BUILT_IN_GOACC_PARALLEL:
4894 {
4895 if (in_ipa_mode)
4896 {
4897 unsigned int fnpos, argpos;
4898 switch (DECL_FUNCTION_CODE (fndecl))
4899 {
4900 case BUILT_IN_GOMP_PARALLEL:
4901 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4902 fnpos = 0;
4903 argpos = 1;
4904 break;
4905 case BUILT_IN_GOACC_PARALLEL:
4906 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
4907 sizes, kinds, ...). */
4908 fnpos = 1;
4909 argpos = 3;
4910 break;
4911 default:
4912 gcc_unreachable ();
4913 }
4914
4915 tree fnarg = gimple_call_arg (t, fnpos);
4916 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4917 tree fndecl = TREE_OPERAND (fnarg, 0);
4918 if (fndecl_maybe_in_other_partition (fndecl))
4919 /* Fallthru to general call handling. */
4920 break;
4921
4922 tree arg = gimple_call_arg (t, argpos);
4923
4924 varinfo_t fi = get_vi_for_tree (fndecl);
4925 find_func_aliases_for_call_arg (fi, 0, arg);
4926 return true;
4927 }
4928 /* Else fallthru to generic call handling. */
4929 break;
4930 }
4931 /* printf-style functions may have hooks to set pointers to
4932 point to somewhere into the generated string. Leave them
4933 for a later exercise... */
4934 default:
4935 /* Fallthru to general call handling. */;
4936 }
4937
4938 return false;
4939 }
4940
4941 /* Create constraints for the call T. */
4942
4943 static void
4944 find_func_aliases_for_call (struct function *fn, gcall *t)
4945 {
4946 tree fndecl = gimple_call_fndecl (t);
4947 varinfo_t fi;
4948
4949 if (fndecl != NULL_TREE
4950 && fndecl_built_in_p (fndecl)
4951 && find_func_aliases_for_builtin_call (fn, t))
4952 return;
4953
4954 if (gimple_call_internal_p (t, IFN_DEFERRED_INIT))
4955 return;
4956
4957 fi = get_fi_for_callee (t);
4958 if (!in_ipa_mode
4959 || (fi->decl && fndecl && !fi->is_fn_info))
4960 {
4961 auto_vec<ce_s, 16> rhsc;
4962 int flags = gimple_call_flags (t);
4963
4964 /* Const functions can return their arguments and addresses
4965 of global memory but not of escaped memory. */
4966 if (flags & (ECF_CONST|ECF_NOVOPS))
4967 {
4968 if (gimple_call_lhs (t))
4969 handle_rhs_call (t, &rhsc, implicit_const_eaf_flags, false, false);
4970 }
4971 /* Pure functions can return addresses in and of memory
4972 reachable from their arguments, but they are not an escape
4973 point for reachable memory of their arguments. */
4974 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4975 handle_rhs_call (t, &rhsc, implicit_pure_eaf_flags, false, true);
4976 /* If the call is to a replaceable operator delete and results
4977 from a delete expression as opposed to a direct call to
4978 such operator, then the effects for PTA (in particular
4979 the escaping of the pointer) can be ignored. */
4980 else if (fndecl
4981 && DECL_IS_OPERATOR_DELETE_P (fndecl)
4982 && gimple_call_from_new_or_delete (t))
4983 ;
4984 else
4985 handle_rhs_call (t, &rhsc, 0, true, true);
4986 if (gimple_call_lhs (t))
4987 handle_lhs_call (t, gimple_call_lhs (t),
4988 gimple_call_return_flags (t), rhsc, fndecl);
4989 }
4990 else
4991 {
4992 auto_vec<ce_s, 2> rhsc;
4993 tree lhsop;
4994 unsigned j;
4995
4996 /* Assign all the passed arguments to the appropriate incoming
4997 parameters of the function. */
4998 for (j = 0; j < gimple_call_num_args (t); j++)
4999 {
5000 tree arg = gimple_call_arg (t, j);
5001 find_func_aliases_for_call_arg (fi, j, arg);
5002 }
5003
5004 /* If we are returning a value, assign it to the result. */
5005 lhsop = gimple_call_lhs (t);
5006 if (lhsop)
5007 {
5008 auto_vec<ce_s, 2> lhsc;
5009 struct constraint_expr rhs;
5010 struct constraint_expr *lhsp;
5011 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
5012
5013 get_constraint_for (lhsop, &lhsc);
5014 rhs = get_function_part_constraint (fi, fi_result);
5015 if (aggr_p)
5016 {
5017 auto_vec<ce_s, 2> tem;
5018 tem.quick_push (rhs);
5019 do_deref (&tem);
5020 gcc_checking_assert (tem.length () == 1);
5021 rhs = tem[0];
5022 }
5023 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5024 process_constraint (new_constraint (*lhsp, rhs));
5025
5026 /* If we pass the result decl by reference, honor that. */
5027 if (aggr_p)
5028 {
5029 struct constraint_expr lhs;
5030 struct constraint_expr *rhsp;
5031
5032 get_constraint_for_address_of (lhsop, &rhsc);
5033 lhs = get_function_part_constraint (fi, fi_result);
5034 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5035 process_constraint (new_constraint (lhs, *rhsp));
5036 rhsc.truncate (0);
5037 }
5038 }
5039
5040 /* If we use a static chain, pass it along. */
5041 if (gimple_call_chain (t))
5042 {
5043 struct constraint_expr lhs;
5044 struct constraint_expr *rhsp;
5045
5046 get_constraint_for (gimple_call_chain (t), &rhsc);
5047 lhs = get_function_part_constraint (fi, fi_static_chain);
5048 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5049 process_constraint (new_constraint (lhs, *rhsp));
5050 }
5051 }
5052 }
5053
5054 /* Walk statement T setting up aliasing constraints according to the
5055 references found in T. This function is the main part of the
5056 constraint builder. */
5058
5059 static void
5060 find_func_aliases (struct function *fn, gimple *origt)
5061 {
5062 gimple *t = origt;
5063 auto_vec<ce_s, 16> lhsc;
5064 auto_vec<ce_s, 16> rhsc;
5065 varinfo_t fi;
5066
5067 /* Now build constraints expressions. */
5068 if (gimple_code (t) == GIMPLE_PHI)
5069 {
5070 /* For a phi node, assign all the arguments to
5071 the result. */
5072 get_constraint_for (gimple_phi_result (t), &lhsc);
5073 for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
5074 {
5075 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
5076 process_all_all_constraints (lhsc, rhsc);
5077 rhsc.truncate (0);
5078 }
5079 }
5080 /* In IPA mode, we need to generate constraints to pass call
5081 arguments through their calls. There are two cases,
5082 either a GIMPLE_CALL returning a value, or just a plain
5083 GIMPLE_CALL when it does not.
5084
5085 In non-IPA mode, we need to generate constraints for each
5086 pointer passed by address. */
5087 else if (is_gimple_call (t))
5088 find_func_aliases_for_call (fn, as_a <gcall *> (t));
5089
5090 /* Otherwise, just a regular assignment statement. Only care about
5091 operations with pointer result, others are dealt with as escape
5092 points if they have pointer operands. */
5093 else if (is_gimple_assign (t))
5094 {
5096 tree lhsop = gimple_assign_lhs (t);
5097 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
5098
5099 if (rhsop && TREE_CLOBBER_P (rhsop))
5100 /* Ignore clobbers, they don't actually store anything into
5101 the LHS. */
5102 ;
5103 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
5104 do_structure_copy (lhsop, rhsop);
5105 else
5106 {
5107 enum tree_code code = gimple_assign_rhs_code (t);
5108
5109 get_constraint_for (lhsop, &lhsc);
5110
5111 if (code == POINTER_PLUS_EXPR)
5112 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5113 gimple_assign_rhs2 (t), &rhsc);
5114 else if (code == POINTER_DIFF_EXPR)
5115 /* The result is not a pointer (part). */
5116 ;
5117 else if (code == BIT_AND_EXPR
5118 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
5119 {
5120 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
5121 the pointer. Handle it by offsetting it by UNKNOWN. */
5122 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5123 NULL_TREE, &rhsc);
5124 }
5125 else if (code == TRUNC_DIV_EXPR
5126 || code == CEIL_DIV_EXPR
5127 || code == FLOOR_DIV_EXPR
5128 || code == ROUND_DIV_EXPR
5129 || code == EXACT_DIV_EXPR
5130 || code == TRUNC_MOD_EXPR
5131 || code == CEIL_MOD_EXPR
5132 || code == FLOOR_MOD_EXPR
5133 || code == ROUND_MOD_EXPR)
5134 /* Division and modulo transfer the pointer from the LHS. */
5135 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5136 NULL_TREE, &rhsc);
5137 else if (CONVERT_EXPR_CODE_P (code)
5138 || gimple_assign_single_p (t))
5139 /* See through conversions, a single RHS is handled by
5140 get_constraint_for_rhs. */
5141 get_constraint_for_rhs (rhsop, &rhsc);
5142 else if (code == COND_EXPR)
5143 {
5144 /* The result is a merge of both COND_EXPR arms. */
5145 auto_vec<ce_s, 2> tmp;
5146 struct constraint_expr *rhsp;
5147 unsigned i;
5148 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
5149 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
5150 FOR_EACH_VEC_ELT (tmp, i, rhsp)
5151 rhsc.safe_push (*rhsp);
5152 }
5153 else if (truth_value_p (code))
5154 /* Truth value results are not pointers (or parts of them), or at
5155 least that would be a very unreasonable obfuscation of one. */
5156 ;
5157 else
5158 {
5159 /* All other operations are possibly offsetting merges. */
5160 auto_vec<ce_s, 4> tmp;
5161 struct constraint_expr *rhsp;
5162 unsigned i, j;
5163 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5164 NULL_TREE, &rhsc);
5165 for (i = 2; i < gimple_num_ops (t); ++i)
5166 {
5167 get_constraint_for_ptr_offset (gimple_op (t, i),
5168 NULL_TREE, &tmp);
5169 FOR_EACH_VEC_ELT (tmp, j, rhsp)
5170 rhsc.safe_push (*rhsp);
5171 tmp.truncate (0);
5172 }
5173 }
5174 process_all_all_constraints (lhsc, rhsc);
5175 }
5176 /* If there is a store to a global variable the rhs escapes. */
5177 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
5178 && DECL_P (lhsop))
5179 {
5180 varinfo_t vi = get_vi_for_tree (lhsop);
5181 if ((! in_ipa_mode && vi->is_global_var)
5182 || vi->is_ipa_escape_point)
5183 make_escape_constraint (rhsop);
5184 }
5185 }
5186 /* Handle escapes through return. */
5187 else if (gimple_code (t) == GIMPLE_RETURN
5188 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
5189 {
5190 greturn *return_stmt = as_a <greturn *> (t);
5191 fi = NULL;
5192 if (!in_ipa_mode
5193 && SSA_VAR_P (gimple_return_retval (return_stmt)))
5194 {
5195 /* We handle simple returns by post-processing the solutions. */
5196 ;
5197 }
5198 if (!(fi = get_vi_for_tree (fn->decl)))
5199 make_escape_constraint (gimple_return_retval (return_stmt));
5200 else if (in_ipa_mode)
5201 {
5202 struct constraint_expr lhs;
5203 struct constraint_expr *rhsp;
5204 unsigned i;
5205
5206 lhs = get_function_part_constraint (fi, fi_result);
5207 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
5208 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5209 process_constraint (new_constraint (lhs, *rhsp));
5210 }
5211 }
5212 /* Handle asms conservatively by adding escape constraints to everything. */
5213 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
5214 {
5215 unsigned i, noutputs;
5216 const char **oconstraints;
5217 const char *constraint;
5218 bool allows_mem, allows_reg, is_inout;
5219
5220 noutputs = gimple_asm_noutputs (asm_stmt);
5221 oconstraints = XALLOCAVEC (const char *, noutputs);
5222
5223 for (i = 0; i < noutputs; ++i)
5224 {
5225 tree link = gimple_asm_output_op (asm_stmt, i);
5226 tree op = TREE_VALUE (link);
5227
5228 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5229 oconstraints[i] = constraint;
5230 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5231 &allows_reg, &is_inout);
5232
5233 /* A memory constraint makes the address of the operand escape. */
5234 if (!allows_reg && allows_mem)
5235 make_escape_constraint (build_fold_addr_expr (op));
5236
5237 /* The asm may read global memory, so outputs may point to
5238 any global memory. */
5239 if (op)
5240 {
5241 auto_vec<ce_s, 2> lhsc;
5242 struct constraint_expr rhsc, *lhsp;
5243 unsigned j;
5244 get_constraint_for (op, &lhsc);
5245 rhsc.var = nonlocal_id;
5246 rhsc.offset = 0;
5247 rhsc.type = SCALAR;
5248 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5249 process_constraint (new_constraint (*lhsp, rhsc));
5250 }
5251 }
5252 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5253 {
5254 tree link = gimple_asm_input_op (asm_stmt, i);
5255 tree op = TREE_VALUE (link);
5256
5257 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5258
5259 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
5260 &allows_mem, &allows_reg);
5261
5262 /* A memory constraint makes the address of the operand escape. */
5263 if (!allows_reg && allows_mem)
5264 make_escape_constraint (build_fold_addr_expr (op));
5265 /* Strictly we'd only need the constraint to ESCAPED if
5266 the asm clobbers memory, otherwise using something
5267 along the lines of per-call clobbers/uses would be enough. */
5268 else if (op)
5269 make_escape_constraint (op);
5270 }
5271 }
5272 }
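
/* As an illustrative summary (constraint notation, not dump output),
   the statement kinds handled above generate roughly:

     p_1 = PHI <q_2, r_3>   =>   p_1 = q_2;  p_1 = r_3;
     p_1 = &a;              =>   p_1 = &a;
     p_1 = *q_2;            =>   p_1 = *q_2;
     *p_1 = q_2;            =>   *p_1 = q_2;
     global = q_2;          =>   ESCAPED = q_2;  (store to a global)

   with calls and asms handled by the helpers above.  */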
5273
5274
5275 /* Create a constraint adding to the clobber set of FI the memory
5276 pointed to by PTR. */
5277
5278 static void
5279 process_ipa_clobber (varinfo_t fi, tree ptr)
5280 {
5281 vec<ce_s> ptrc = vNULL;
5282 struct constraint_expr *c, lhs;
5283 unsigned i;
5284 get_constraint_for_rhs (ptr, &ptrc);
5285 lhs = get_function_part_constraint (fi, fi_clobbers);
5286 FOR_EACH_VEC_ELT (ptrc, i, c)
5287 process_constraint (new_constraint (lhs, *c));
5288 ptrc.release ();
5289 }
5290
5291 /* Walk statement T setting up clobber and use constraints according to the
5292 references found in T. This function is a main part of the
5293 IPA constraint builder. */
5294
5295 static void
5296 find_func_clobbers (struct function *fn, gimple *origt)
5297 {
5298 gimple *t = origt;
5299 auto_vec<ce_s, 16> lhsc;
5300 auto_vec<ce_s, 16> rhsc;
5301 varinfo_t fi;
5302
5303 /* Add constraints for clobbered/used in IPA mode.
5304 We are not interested in what automatic variables are clobbered
5305 or used, as we only use the information in the caller, to which
5306 they do not escape. */
5307 gcc_assert (in_ipa_mode);
5308
5309 /* If the stmt refers to memory in any way it had better have a VUSE. */
5310 if (gimple_vuse (t) == NULL_TREE)
5311 return;
5312
5313 /* We'd better have function information for the current function. */
5314 fi = lookup_vi_for_tree (fn->decl);
5315 gcc_assert (fi != NULL);
5316
5317 /* Account for stores in assignments and calls. */
5318 if (gimple_vdef (t) != NULL_TREE
5319 && gimple_has_lhs (t))
5320 {
5321 tree lhs = gimple_get_lhs (t);
5322 tree tem = lhs;
5323 while (handled_component_p (tem))
5324 tem = TREE_OPERAND (tem, 0);
5325 if ((DECL_P (tem)
5326 && !auto_var_in_fn_p (tem, fn->decl))
5327 || INDIRECT_REF_P (tem)
5328 || (TREE_CODE (tem) == MEM_REF
5329 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5330 && auto_var_in_fn_p
5331 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5332 {
5333 struct constraint_expr lhsc, *rhsp;
5334 unsigned i;
5335 lhsc = get_function_part_constraint (fi, fi_clobbers);
5336 get_constraint_for_address_of (lhs, &rhsc);
5337 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5338 process_constraint (new_constraint (lhsc, *rhsp));
5339 rhsc.truncate (0);
5340 }
5341 }
5342
5343 /* Account for uses in assignments and returns. */
5344 if (gimple_assign_single_p (t)
5345 || (gimple_code (t) == GIMPLE_RETURN
5346 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5347 {
5348 tree rhs = (gimple_assign_single_p (t)
5349 ? gimple_assign_rhs1 (t)
5350 : gimple_return_retval (as_a <greturn *> (t)));
5351 tree tem = rhs;
5352 while (handled_component_p (tem))
5353 tem = TREE_OPERAND (tem, 0);
5354 if ((DECL_P (tem)
5355 && !auto_var_in_fn_p (tem, fn->decl))
5356 || INDIRECT_REF_P (tem)
5357 || (TREE_CODE (tem) == MEM_REF
5358 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5359 && auto_var_in_fn_p
5360 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5361 {
5362 struct constraint_expr lhs, *rhsp;
5363 unsigned i;
5364 lhs = get_function_part_constraint (fi, fi_uses);
5365 get_constraint_for_address_of (rhs, &rhsc);
5366 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5367 process_constraint (new_constraint (lhs, *rhsp));
5368 rhsc.truncate (0);
5369 }
5370 }
5371
5372 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5373 {
5374 varinfo_t cfi = NULL;
5375 tree decl = gimple_call_fndecl (t);
5376 struct constraint_expr lhs, rhs;
5377 unsigned i, j;
5378
5379 /* For builtins we do not have separate function info. For those
5380 we do not generate escapes for, we have to generate clobbers/uses. */
5381 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5382 switch (DECL_FUNCTION_CODE (decl))
5383 {
5384 /* The following functions use and clobber memory pointed to
5385 by their arguments. */
5386 case BUILT_IN_STRCPY:
5387 case BUILT_IN_STRNCPY:
5388 case BUILT_IN_BCOPY:
5389 case BUILT_IN_MEMCPY:
5390 case BUILT_IN_MEMMOVE:
5391 case BUILT_IN_MEMPCPY:
5392 case BUILT_IN_STPCPY:
5393 case BUILT_IN_STPNCPY:
5394 case BUILT_IN_STRCAT:
5395 case BUILT_IN_STRNCAT:
5396 case BUILT_IN_STRCPY_CHK:
5397 case BUILT_IN_STRNCPY_CHK:
5398 case BUILT_IN_MEMCPY_CHK:
5399 case BUILT_IN_MEMMOVE_CHK:
5400 case BUILT_IN_MEMPCPY_CHK:
5401 case BUILT_IN_STPCPY_CHK:
5402 case BUILT_IN_STPNCPY_CHK:
5403 case BUILT_IN_STRCAT_CHK:
5404 case BUILT_IN_STRNCAT_CHK:
5405 {
5406 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5407 == BUILT_IN_BCOPY ? 1 : 0));
5408 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5409 == BUILT_IN_BCOPY ? 0 : 1));
5410 unsigned i;
5411 struct constraint_expr *rhsp, *lhsp;
5412 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5413 lhs = get_function_part_constraint (fi, fi_clobbers);
5414 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5415 process_constraint (new_constraint (lhs, *lhsp));
5416 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5417 lhs = get_function_part_constraint (fi, fi_uses);
5418 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5419 process_constraint (new_constraint (lhs, *rhsp));
5420 return;
5421 }
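/* For illustration, for a call memcpy (d, s, n) in function FN the
   above generates the constraints FN.clobber = D and FN.use = S,
   i.e. FN may clobber everything D points to and may use everything
   S points to.  */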
5422 /* The following functions clobber memory pointed to by
5423 their first argument. */
5424 case BUILT_IN_MEMSET:
5425 case BUILT_IN_MEMSET_CHK:
5426 case BUILT_IN_POSIX_MEMALIGN:
5427 {
5428 tree dest = gimple_call_arg (t, 0);
5429 unsigned i;
5430 ce_s *lhsp;
5431 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5432 lhs = get_function_part_constraint (fi, fi_clobbers);
5433 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5434 process_constraint (new_constraint (lhs, *lhsp));
5435 return;
5436 }
5437 /* The following functions clobber their second and third
5438 arguments. */
5439 case BUILT_IN_SINCOS:
5440 case BUILT_IN_SINCOSF:
5441 case BUILT_IN_SINCOSL:
5442 {
5443 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5444 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5445 return;
5446 }
5447 /* The following functions clobber their second argument. */
5448 case BUILT_IN_FREXP:
5449 case BUILT_IN_FREXPF:
5450 case BUILT_IN_FREXPL:
5451 case BUILT_IN_LGAMMA_R:
5452 case BUILT_IN_LGAMMAF_R:
5453 case BUILT_IN_LGAMMAL_R:
5454 case BUILT_IN_GAMMA_R:
5455 case BUILT_IN_GAMMAF_R:
5456 case BUILT_IN_GAMMAL_R:
5457 case BUILT_IN_MODF:
5458 case BUILT_IN_MODFF:
5459 case BUILT_IN_MODFL:
5460 {
5461 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5462 return;
5463 }
5464 /* The following functions clobber their third argument. */
5465 case BUILT_IN_REMQUO:
5466 case BUILT_IN_REMQUOF:
5467 case BUILT_IN_REMQUOL:
5468 {
5469 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5470 return;
5471 }
5472 /* The following functions neither read nor clobber memory. */
5473 case BUILT_IN_ASSUME_ALIGNED:
5474 case BUILT_IN_FREE:
5475 return;
5476 /* Trampolines are of no interest to us. */
5477 case BUILT_IN_INIT_TRAMPOLINE:
5478 case BUILT_IN_ADJUST_TRAMPOLINE:
5479 return;
5480 case BUILT_IN_VA_START:
5481 case BUILT_IN_VA_END:
5482 return;
5483 case BUILT_IN_GOMP_PARALLEL:
5484 case BUILT_IN_GOACC_PARALLEL:
5485 {
5486 unsigned int fnpos, argpos;
5487 unsigned int implicit_use_args[2];
5488 unsigned int num_implicit_use_args = 0;
5489 switch (DECL_FUNCTION_CODE (decl))
5490 {
5491 case BUILT_IN_GOMP_PARALLEL:
5492 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5493 fnpos = 0;
5494 argpos = 1;
5495 break;
5496 case BUILT_IN_GOACC_PARALLEL:
5497 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5498 sizes, kinds, ...). */
5499 fnpos = 1;
5500 argpos = 3;
5501 implicit_use_args[num_implicit_use_args++] = 4;
5502 implicit_use_args[num_implicit_use_args++] = 5;
5503 break;
5504 default:
5505 gcc_unreachable ();
5506 }
5507
5508 tree fnarg = gimple_call_arg (t, fnpos);
5509 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5510 tree fndecl = TREE_OPERAND (fnarg, 0);
5511 if (fndecl_maybe_in_other_partition (fndecl))
5512 /* Fallthru to general call handling. */
5513 break;
5514
5515 varinfo_t cfi = get_vi_for_tree (fndecl);
5516
5517 tree arg = gimple_call_arg (t, argpos);
5518
5519 /* Parameter passed by value is used. */
5520 lhs = get_function_part_constraint (fi, fi_uses);
5521 struct constraint_expr *rhsp;
5522 get_constraint_for (arg, &rhsc);
5523 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5524 process_constraint (new_constraint (lhs, *rhsp));
5525 rhsc.truncate (0);
5526
5527 /* Handle parameters used by the call, but not used in cfi, as
5528 implicitly used by cfi. */
5529 lhs = get_function_part_constraint (cfi, fi_uses);
5530 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5531 {
5532 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5533 get_constraint_for (arg, &rhsc);
5534 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5535 process_constraint (new_constraint (lhs, *rhsp));
5536 rhsc.truncate (0);
5537 }
5538
5539 /* The caller clobbers what the callee does. */
5540 lhs = get_function_part_constraint (fi, fi_clobbers);
5541 rhs = get_function_part_constraint (cfi, fi_clobbers);
5542 process_constraint (new_constraint (lhs, rhs));
5543
5544 /* The caller uses what the callee does. */
5545 lhs = get_function_part_constraint (fi, fi_uses);
5546 rhs = get_function_part_constraint (cfi, fi_uses);
5547 process_constraint (new_constraint (lhs, rhs));
5548
5549 return;
5550 }
5551 /* printf-style functions may have hooks to set pointers to
5552 point to somewhere into the generated string. Leave them
5553 for a later exercise... */
5554 default:
5555 /* Fallthru to general call handling. */;
5556 }
5557
5558 /* Parameters passed by value are used. */
5559 lhs = get_function_part_constraint (fi, fi_uses);
5560 for (i = 0; i < gimple_call_num_args (t); i++)
5561 {
5562 struct constraint_expr *rhsp;
5563 tree arg = gimple_call_arg (t, i);
5564
5565 if (TREE_CODE (arg) == SSA_NAME
5566 || is_gimple_min_invariant (arg))
5567 continue;
5568
5569 get_constraint_for_address_of (arg, &rhsc);
5570 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5571 process_constraint (new_constraint (lhs, *rhsp));
5572 rhsc.truncate (0);
5573 }
5574
5575 /* Build constraints for propagating clobbers/uses along the
5576 callgraph edges. */
5577 cfi = get_fi_for_callee (call_stmt);
5578 if (cfi->id == anything_id)
5579 {
5580 if (gimple_vdef (t))
5581 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5582 anything_id);
5583 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5584 anything_id);
5585 return;
5586 }
5587
5588 /* For callees without function info (that's external functions),
5589 ESCAPED is clobbered and used. */
5590 if (cfi->decl
5591 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5592 && !cfi->is_fn_info)
5593 {
5594 varinfo_t vi;
5595
5596 if (gimple_vdef (t))
5597 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5598 escaped_id);
5599 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5600
5601 /* Also honor the call statement use/clobber info. */
5602 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5603 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5604 vi->id);
5605 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5606 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5607 vi->id);
5608 return;
5609 }
5610
5611 /* Otherwise the caller clobbers and uses what the callee does.
5612 ??? This should use a new complex constraint that filters
5613 local variables of the callee. */
5614 if (gimple_vdef (t))
5615 {
5616 lhs = get_function_part_constraint (fi, fi_clobbers);
5617 rhs = get_function_part_constraint (cfi, fi_clobbers);
5618 process_constraint (new_constraint (lhs, rhs));
5619 }
5620 lhs = get_function_part_constraint (fi, fi_uses);
5621 rhs = get_function_part_constraint (cfi, fi_uses);
5622 process_constraint (new_constraint (lhs, rhs));
5623 }
5624 else if (gimple_code (t) == GIMPLE_ASM)
5625 {
5626 /* ??? Ick. We can do better. */
5627 if (gimple_vdef (t))
5628 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5629 anything_id);
5630 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5631 anything_id);
5632 }
5633 }
5634
5635
5636 /* Find the first varinfo in the same variable as START that overlaps with
5637 OFFSET. Return NULL if we can't find one. */
5638
5639 static varinfo_t
5640 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5641 {
5642 /* If the offset is outside of the variable, bail out. */
5643 if (offset >= start->fullsize)
5644 return NULL;
5645
5646 /* If we cannot reach OFFSET from START, look up the first field
5647 and start from there. */
5648 if (start->offset > offset)
5649 start = get_varinfo (start->head);
5650
5651 while (start)
5652 {
5653 /* We may not find a variable in the field list with the actual
5654 offset when we have glommed a structure to a variable.
5655 In that case, however, offset should still be within the size
5656 of the variable. */
5657 if (offset >= start->offset
5658 && (offset - start->offset) < start->size)
5659 return start;
5660
5661 start = vi_next (start);
5662 }
5663
5664 return NULL;
5665 }
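/* For example, for a variable with fields of size 32 at offsets 0
   and 32, first_vi_for_offset (vi, 40) returns the field at offset
   32, since 32 <= 40 < 32 + 32.  */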
5666
5667 /* Find the first varinfo in the same variable as START that overlaps with
5668 OFFSET. If there is no such varinfo the varinfo directly preceding
5669 OFFSET is returned. */
5670
5671 static varinfo_t
5672 first_or_preceding_vi_for_offset (varinfo_t start,
5673 unsigned HOST_WIDE_INT offset)
5674 {
5675 /* If we cannot reach OFFSET from START, look up the first field
5676 and start from there. */
5677 if (start->offset > offset)
5678 start = get_varinfo (start->head);
5679
5680 /* We may not find a variable in the field list with the actual
5681 offset when we have glommed a structure to a variable.
5682 In that case, however, offset should still be within the size
5683 of the variable.
5684 If we went past the offset we look for, return the field
5685 directly preceding it, which may be the last field. */
5686 while (start->next
5687 && offset >= start->offset
5688 && !((offset - start->offset) < start->size))
5689 start = vi_next (start);
5690
5691 return start;
5692 }
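/* In the same example, for an OFFSET beyond all fields
   first_vi_for_offset returns NULL whereas this variant returns
   the last field instead.  */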
5693
5694
5695 /* This structure is used during pushing fields onto the fieldstack
5696 to track the offset of the field, since bitpos_of_field gives it
5697 relative to its immediate containing type, and we want it relative
5698 to the ultimate containing object. */
5699
5700 struct fieldoff
5701 {
5702 /* Offset from the base of the outermost containing object to this field. */
5703 HOST_WIDE_INT offset;
5704
5705 /* Size, in bits, of the field. */
5706 unsigned HOST_WIDE_INT size;
5707
5708 unsigned has_unknown_size : 1;
5709
5710 unsigned must_have_pointers : 1;
5711
5712 unsigned may_have_pointers : 1;
5713
5714 unsigned only_restrict_pointers : 1;
5715
5716 tree restrict_pointed_type;
5717 };
5718 typedef struct fieldoff fieldoff_s;
5719
5720
5721 /* qsort comparison function for two fieldoff's PA and PB */
5722
5723 static int
5724 fieldoff_compare (const void *pa, const void *pb)
5725 {
5726 const fieldoff_s *foa = (const fieldoff_s *)pa;
5727 const fieldoff_s *fob = (const fieldoff_s *)pb;
5728 unsigned HOST_WIDE_INT foasize, fobsize;
5729
5730 if (foa->offset < fob->offset)
5731 return -1;
5732 else if (foa->offset > fob->offset)
5733 return 1;
5734
5735 foasize = foa->size;
5736 fobsize = fob->size;
5737 if (foasize < fobsize)
5738 return -1;
5739 else if (foasize > fobsize)
5740 return 1;
5741 return 0;
5742 }
5743
5744 /* Sort a fieldstack according to the field offset and sizes. */
5745 static void
5746 sort_fieldstack (vec<fieldoff_s> &fieldstack)
5747 {
5748 fieldstack.qsort (fieldoff_compare);
5749 }
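/* For example, entries with (offset, size) of (32, 32), (0, 64) and
   (0, 32) sort as (0, 32), (0, 64), (32, 32): primarily by offset,
   secondarily by size.  */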
5750
5751 /* Return true if T is a type that can have subvars. */
5752
5753 static inline bool
5754 type_can_have_subvars (const_tree t)
5755 {
5756 /* Aggregates without overlapping fields can have subvars. */
5757 return TREE_CODE (t) == RECORD_TYPE;
5758 }
5759
5760 /* Return true if V is a tree that we can have subvars for.
5761 Normally, this is any aggregate type without overlapping fields;
5762 in practice only RECORD_TYPEs are handled, see type_can_have_subvars. */
5763
5764 static inline bool
5765 var_can_have_subvars (const_tree v)
5766 {
5767 /* Volatile variables should never have subvars. */
5768 if (TREE_THIS_VOLATILE (v))
5769 return false;
5770
5771 /* Non-decls and memory tags can never have subvars. */
5772 if (!DECL_P (v))
5773 return false;
5774
5775 return type_can_have_subvars (TREE_TYPE (v));
5776 }
5777
5778 /* Return true if TYPE is a type that must contain pointers. */
5779
5780 static bool
5781 type_must_have_pointers (tree type)
5782 {
5783 if (POINTER_TYPE_P (type))
5784 return true;
5785
5786 if (TREE_CODE (type) == ARRAY_TYPE)
5787 return type_must_have_pointers (TREE_TYPE (type));
5788
5789 /* A function or method can have pointers as arguments, so track
5790 those separately. */
5791 if (TREE_CODE (type) == FUNCTION_TYPE
5792 || TREE_CODE (type) == METHOD_TYPE)
5793 return true;
5794
5795 return false;
5796 }
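/* For example, 'int *', arrays of pointers and function types must
   have pointers, while 'double' must not.  Structure types return
   false here; their fields are considered individually by the
   callers below.  */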
5797
5798 static bool
5799 field_must_have_pointers (tree t)
5800 {
5801 return type_must_have_pointers (TREE_TYPE (t));
5802 }
5803
5804 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5805 the fields of TYPE onto FIELDSTACK, recording their offsets along
5806 the way.
5807
5808 OFFSET is used to keep track of the offset in this entire
5809 structure, rather than just the immediately containing structure.
5810 Returns false if the caller is supposed to handle the field we
5811 recursed for. */
5812
5813 static bool
5814 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5815 HOST_WIDE_INT offset)
5816 {
5817 tree field;
5818 bool empty_p = true;
5819
5820 if (TREE_CODE (type) != RECORD_TYPE)
5821 return false;
5822
5823 /* If the vector of fields is growing too big, bail out early.
5824 Callers check that vec::length () <= param_max_fields_for_field_sensitive,
5825 so make sure that check fails for too large vectors. */
5826 if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
5827 return false;
5828
5829 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5830 if (TREE_CODE (field) == FIELD_DECL)
5831 {
5832 bool push = false;
5833 HOST_WIDE_INT foff = bitpos_of_field (field);
5834 tree field_type = TREE_TYPE (field);
5835
5836 if (!var_can_have_subvars (field)
5837 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5838 || TREE_CODE (field_type) == UNION_TYPE)
5839 push = true;
5840 else if (!push_fields_onto_fieldstack
5841 (field_type, fieldstack, offset + foff)
5842 && (DECL_SIZE (field)
5843 && !integer_zerop (DECL_SIZE (field))))
5844 /* Empty structures may have actual size, like in C++. So
5845 if we didn't push any subfields and the size is nonzero,
5846 push the field itself onto the stack. */
5847 push = true;
5848
5849 if (push)
5850 {
5851 fieldoff_s *pair = NULL;
5852 bool has_unknown_size = false;
5853 bool must_have_pointers_p;
5854
5855 if (!fieldstack->is_empty ())
5856 pair = &fieldstack->last ();
5857
5858 /* If there isn't anything at offset zero, create something. */
5859 if (!pair
5860 && offset + foff != 0)
5861 {
5862 fieldoff_s e
5863 = {0, offset + foff, false, false, true, false, NULL_TREE};
5864 pair = fieldstack->safe_push (e);
5865 }
5866
5867 if (!DECL_SIZE (field)
5868 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5869 has_unknown_size = true;
5870
5871 /* If adjacent fields do not contain pointers, merge them. */
5872 must_have_pointers_p = field_must_have_pointers (field);
5873 if (pair
5874 && !has_unknown_size
5875 && !must_have_pointers_p
5876 && !pair->must_have_pointers
5877 && !pair->has_unknown_size
5878 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5879 {
5880 pair->size += tree_to_uhwi (DECL_SIZE (field));
5881 }
5882 else
5883 {
5884 fieldoff_s e;
5885 e.offset = offset + foff;
5886 e.has_unknown_size = has_unknown_size;
5887 if (!has_unknown_size)
5888 e.size = tree_to_uhwi (DECL_SIZE (field));
5889 else
5890 e.size = -1;
5891 e.must_have_pointers = must_have_pointers_p;
5892 e.may_have_pointers = true;
5893 e.only_restrict_pointers
5894 = (!has_unknown_size
5895 && POINTER_TYPE_P (field_type)
5896 && TYPE_RESTRICT (field_type));
5897 if (e.only_restrict_pointers)
5898 e.restrict_pointed_type = TREE_TYPE (field_type);
5899 fieldstack->safe_push (e);
5900 }
5901 }
5902
5903 empty_p = false;
5904 }
5905
5906 return !empty_p;
5907 }
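/* As an example of the flattening and merging above, for

     struct S { int a; int b; int *p; };

   with 32-bit int this pushes two fieldoffs: the adjacent
   pointer-free fields a and b merged into one entry of size 64 at
   offset 0, and one entry for p at offset 64 with
   must_have_pointers set.  */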
5908
5909 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5910 if it is a varargs function. */
5911
5912 static unsigned int
5913 count_num_arguments (tree decl, bool *is_varargs)
5914 {
5915 unsigned int num = 0;
5916 tree t;
5917
5918 /* Capture named arguments for K&R functions. They do not
5919 have a prototype and thus no TYPE_ARG_TYPES. */
5920 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5921 ++num;
5922
5923 /* Check if the function has variadic arguments. */
5924 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5925 if (TREE_VALUE (t) == void_type_node)
5926 break;
5927 if (!t)
5928 *is_varargs = true;
5929
5930 return num;
5931 }
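/* For example, for 'int f (int a, const char *fmt, ...)' this
   returns 2 and sets *IS_VARARGS to true; note *IS_VARARGS is only
   ever set, never cleared.  */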
5932
5933 /* Create function info for DECL, using NAME, and return the
5934 variable info we create for the function. If NONLOCAL_P, create
5935 initial constraints. */
5936
5937 static varinfo_t
5938 create_function_info_for (tree decl, const char *name, bool add_id,
5939 bool nonlocal_p)
5940 {
5941 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5942 varinfo_t vi, prev_vi;
5943 tree arg;
5944 unsigned int i;
5945 bool is_varargs = false;
5946 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5947
5948 /* Create the variable info. */
5949
5950 vi = new_var_info (decl, name, add_id);
5951 vi->offset = 0;
5952 vi->size = 1;
5953 vi->fullsize = fi_parm_base + num_args;
5954 vi->is_fn_info = 1;
5955 vi->may_have_pointers = false;
5956 if (is_varargs)
5957 vi->fullsize = ~0;
5958 insert_vi_for_tree (vi->decl, vi);
5959
5960 prev_vi = vi;
5961
5962 /* Create a variable for things the function clobbers and one for
5963 things the function uses. */
5964 {
5965 varinfo_t clobbervi, usevi;
5966 const char *newname;
5967 char *tempname;
5968
5969 tempname = xasprintf ("%s.clobber", name);
5970 newname = ggc_strdup (tempname);
5971 free (tempname);
5972
5973 clobbervi = new_var_info (NULL, newname, false);
5974 clobbervi->offset = fi_clobbers;
5975 clobbervi->size = 1;
5976 clobbervi->fullsize = vi->fullsize;
5977 clobbervi->is_full_var = true;
5978 clobbervi->is_global_var = false;
5979 clobbervi->is_reg_var = true;
5980
5981 gcc_assert (prev_vi->offset < clobbervi->offset);
5982 prev_vi->next = clobbervi->id;
5983 prev_vi = clobbervi;
5984
5985 tempname = xasprintf ("%s.use", name);
5986 newname = ggc_strdup (tempname);
5987 free (tempname);
5988
5989 usevi = new_var_info (NULL, newname, false);
5990 usevi->offset = fi_uses;
5991 usevi->size = 1;
5992 usevi->fullsize = vi->fullsize;
5993 usevi->is_full_var = true;
5994 usevi->is_global_var = false;
5995 usevi->is_reg_var = true;
5996
5997 gcc_assert (prev_vi->offset < usevi->offset);
5998 prev_vi->next = usevi->id;
5999 prev_vi = usevi;
6000 }
6001
6002 /* And one for the static chain. */
6003 if (fn->static_chain_decl != NULL_TREE)
6004 {
6005 varinfo_t chainvi;
6006 const char *newname;
6007 char *tempname;
6008
6009 tempname = xasprintf ("%s.chain", name);
6010 newname = ggc_strdup (tempname);
6011 free (tempname);
6012
6013 chainvi = new_var_info (fn->static_chain_decl, newname, false);
6014 chainvi->offset = fi_static_chain;
6015 chainvi->size = 1;
6016 chainvi->fullsize = vi->fullsize;
6017 chainvi->is_full_var = true;
6018 chainvi->is_global_var = false;
6019
6020 insert_vi_for_tree (fn->static_chain_decl, chainvi);
6021
6022 if (nonlocal_p
6023 && chainvi->may_have_pointers)
6024 make_constraint_from (chainvi, nonlocal_id);
6025
6026 gcc_assert (prev_vi->offset < chainvi->offset);
6027 prev_vi->next = chainvi->id;
6028 prev_vi = chainvi;
6029 }
6030
6031 /* Create a variable for the return var. */
6032 if (DECL_RESULT (decl) != NULL
6033 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
6034 {
6035 varinfo_t resultvi;
6036 const char *newname;
6037 char *tempname;
6038 tree resultdecl = decl;
6039
6040 if (DECL_RESULT (decl))
6041 resultdecl = DECL_RESULT (decl);
6042
6043 tempname = xasprintf ("%s.result", name);
6044 newname = ggc_strdup (tempname);
6045 free (tempname);
6046
6047 resultvi = new_var_info (resultdecl, newname, false);
6048 resultvi->offset = fi_result;
6049 resultvi->size = 1;
6050 resultvi->fullsize = vi->fullsize;
6051 resultvi->is_full_var = true;
6052 if (DECL_RESULT (decl))
6053 resultvi->may_have_pointers = true;
6054
6055 if (DECL_RESULT (decl))
6056 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
6057
6058 if (nonlocal_p
6059 && DECL_RESULT (decl)
6060 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
6061 make_constraint_from (resultvi, nonlocal_id);
6062
6063 gcc_assert (prev_vi->offset < resultvi->offset);
6064 prev_vi->next = resultvi->id;
6065 prev_vi = resultvi;
6066 }
6067
6068 /* We also need to make function return values escape. Nothing
6069 escapes by returning from main though. */
6070 if (nonlocal_p
6071 && !MAIN_NAME_P (DECL_NAME (decl)))
6072 {
6073 varinfo_t fi, rvi;
6074 fi = lookup_vi_for_tree (decl);
6075 rvi = first_vi_for_offset (fi, fi_result);
6076 if (rvi && rvi->offset == fi_result)
6077 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
6078 }
6079
6080 /* Set up variables for each argument. */
6081 arg = DECL_ARGUMENTS (decl);
6082 for (i = 0; i < num_args; i++)
6083 {
6084 varinfo_t argvi;
6085 const char *newname;
6086 char *tempname;
6087 tree argdecl = decl;
6088
6089 if (arg)
6090 argdecl = arg;
6091
6092 tempname = xasprintf ("%s.arg%d", name, i);
6093 newname = ggc_strdup (tempname);
6094 free (tempname);
6095
6096 argvi = new_var_info (argdecl, newname, false);
6097 argvi->offset = fi_parm_base + i;
6098 argvi->size = 1;
6099 argvi->is_full_var = true;
6100 argvi->fullsize = vi->fullsize;
6101 if (arg)
6102 argvi->may_have_pointers = true;
6103
6104 if (arg)
6105 insert_vi_for_tree (arg, argvi);
6106
6107 if (nonlocal_p
6108 && argvi->may_have_pointers)
6109 make_constraint_from (argvi, nonlocal_id);
6110
6111 gcc_assert (prev_vi->offset < argvi->offset);
6112 prev_vi->next = argvi->id;
6113 prev_vi = argvi;
6114 if (arg)
6115 arg = DECL_CHAIN (arg);
6116 }
6117
6118 /* Add one representative for all further args. */
6119 if (is_varargs)
6120 {
6121 varinfo_t argvi;
6122 const char *newname;
6123 char *tempname;
6124 tree decl;
6125
6126 tempname = xasprintf ("%s.varargs", name);
6127 newname = ggc_strdup (tempname);
6128 free (tempname);
6129
6130 /* We need something that can be pointed to for va_start. */
6131 decl = build_fake_var_decl (ptr_type_node);
6132
6133 argvi = new_var_info (decl, newname, false);
6134 argvi->offset = fi_parm_base + num_args;
6135 argvi->size = ~0;
6136 argvi->is_full_var = true;
6137 argvi->is_heap_var = true;
6138 argvi->fullsize = vi->fullsize;
6139
6140 if (nonlocal_p
6141 && argvi->may_have_pointers)
6142 make_constraint_from (argvi, nonlocal_id);
6143
6144 gcc_assert (prev_vi->offset < argvi->offset);
6145 prev_vi->next = argvi->id;
6146 }
6147
6148 return vi;
6149 }
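/* The function info created above behaves like a structure with
   sub-variables at fixed offsets, e.g. for 'int *foo (int *p)'
   roughly

     foo           offset 0
     foo.clobber   offset fi_clobbers
     foo.use       offset fi_uses
     foo.result    offset fi_result
     foo.arg0      offset fi_parm_base

   which get_function_part_constraint addresses via those offsets.  */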
6150
6151
6152 /* Return true if FIELDSTACK contains fields that overlap.
6153 FIELDSTACK is assumed to be sorted by offset. */
6154
6155 static bool
6156 check_for_overlaps (const vec<fieldoff_s> &fieldstack)
6157 {
6158 fieldoff_s *fo = NULL;
6159 unsigned int i;
6160 HOST_WIDE_INT lastoffset = -1;
6161
6162 FOR_EACH_VEC_ELT (fieldstack, i, fo)
6163 {
6164 if (fo->offset == lastoffset)
6165 return true;
6166 lastoffset = fo->offset;
6167 }
6168 return false;
6169 }
6170
6171 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
6172 This will also create any varinfo structures necessary for fields
6173 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
6174 HANDLED_STRUCT_TYPE is used to register struct types reached by following
6175 restrict pointers. This is needed to prevent infinite recursion.
6176 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
6177 does not advertise it. */
6178
6179 static varinfo_t
6180 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
6181 bool handle_param, bitmap handled_struct_type,
6182 bool add_restrict = false)
6183 {
6184 varinfo_t vi, newvi;
6185 tree decl_type = TREE_TYPE (decl);
6186 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
6187 auto_vec<fieldoff_s> fieldstack;
6188 fieldoff_s *fo;
6189 unsigned int i;
6190
6191 if (!declsize
6192 || !tree_fits_uhwi_p (declsize))
6193 {
6194 vi = new_var_info (decl, name, add_id);
6195 vi->offset = 0;
6196 vi->size = ~0;
6197 vi->fullsize = ~0;
6198 vi->is_unknown_size_var = true;
6199 vi->is_full_var = true;
6200 vi->may_have_pointers = true;
6201 return vi;
6202 }
6203
6204 /* Collect field information. */
6205 if (use_field_sensitive
6206 && var_can_have_subvars (decl)
6207 /* ??? Force us to not use subfields for globals in IPA mode.
6208 Else we'd have to parse arbitrary initializers. */
6209 && !(in_ipa_mode
6210 && is_global_var (decl)))
6211 {
6212 fieldoff_s *fo = NULL;
6213 bool notokay = false;
6214 unsigned int i;
6215
6216 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
6217
6218 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
6219 if (fo->has_unknown_size
6220 || fo->offset < 0)
6221 {
6222 notokay = true;
6223 break;
6224 }
6225
6226 /* We can't sort them if we have a field with a variable sized type,
6227 which will make notokay = true. In that case, we are going to return
6228 without creating varinfos for the fields anyway, so sorting them is a
6229 waste to boot. */
6230 if (!notokay)
6231 {
6232 sort_fieldstack (fieldstack);
6233 /* Due to some C++ FE issues, like PR 22488, we might end up
6234 with what appear to be overlapping fields even though they,
6235 in reality, do not overlap. Until the C++ FE is fixed,
6236 we will simply disable field-sensitivity for these cases. */
6237 notokay = check_for_overlaps (fieldstack);
6238 }
6239
6240 if (notokay)
6241 fieldstack.release ();
6242 }
6243
6244 /* If we didn't end up collecting sub-variables create a full
6245 variable for the decl. */
6246 if (fieldstack.length () == 0
6247 || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive)
6248 {
6249 vi = new_var_info (decl, name, add_id);
6250 vi->offset = 0;
6251 vi->may_have_pointers = true;
6252 vi->fullsize = tree_to_uhwi (declsize);
6253 vi->size = vi->fullsize;
6254 vi->is_full_var = true;
6255 if (POINTER_TYPE_P (decl_type)
6256 && (TYPE_RESTRICT (decl_type) || add_restrict))
6257 vi->only_restrict_pointers = 1;
6258 if (vi->only_restrict_pointers
6259 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
6260 && handle_param
6261 && !bitmap_bit_p (handled_struct_type,
6262 TYPE_UID (TREE_TYPE (decl_type))))
6263 {
6264 varinfo_t rvi;
6265 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
6266 DECL_EXTERNAL (heapvar) = 1;
6267 if (var_can_have_subvars (heapvar))
6268 bitmap_set_bit (handled_struct_type,
6269 TYPE_UID (TREE_TYPE (decl_type)));
6270 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6271 true, handled_struct_type);
6272 if (var_can_have_subvars (heapvar))
6273 bitmap_clear_bit (handled_struct_type,
6274 TYPE_UID (TREE_TYPE (decl_type)));
6275 rvi->is_restrict_var = 1;
6276 insert_vi_for_tree (heapvar, rvi);
6277 make_constraint_from (vi, rvi->id);
6278 make_param_constraints (rvi);
6279 }
6280 fieldstack.release ();
6281 return vi;
6282 }
6283
6284 vi = new_var_info (decl, name, add_id);
6285 vi->fullsize = tree_to_uhwi (declsize);
6286 if (fieldstack.length () == 1)
6287 vi->is_full_var = true;
6288 for (i = 0, newvi = vi;
6289 fieldstack.iterate (i, &fo);
6290 ++i, newvi = vi_next (newvi))
6291 {
6292 const char *newname = NULL;
6293 char *tempname;
6294
6295 if (dump_file)
6296 {
6297 if (fieldstack.length () != 1)
6298 {
6299 tempname
6300 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6301 "+" HOST_WIDE_INT_PRINT_DEC, name,
6302 fo->offset, fo->size);
6303 newname = ggc_strdup (tempname);
6304 free (tempname);
6305 }
6306 }
6307 else
6308 newname = "NULL";
6309
6310 if (newname)
6311 newvi->name = newname;
6312 newvi->offset = fo->offset;
6313 newvi->size = fo->size;
6314 newvi->fullsize = vi->fullsize;
6315 newvi->may_have_pointers = fo->may_have_pointers;
6316 newvi->only_restrict_pointers = fo->only_restrict_pointers;
6317 if (handle_param
6318 && newvi->only_restrict_pointers
6319 && !type_contains_placeholder_p (fo->restrict_pointed_type)
6320 && !bitmap_bit_p (handled_struct_type,
6321 TYPE_UID (fo->restrict_pointed_type)))
6322 {
6323 varinfo_t rvi;
6324 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
6325 DECL_EXTERNAL (heapvar) = 1;
6326 if (var_can_have_subvars (heapvar))
6327 bitmap_set_bit (handled_struct_type,
6328 TYPE_UID (fo->restrict_pointed_type));
6329 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6330 true, handled_struct_type);
6331 if (var_can_have_subvars (heapvar))
6332 bitmap_clear_bit (handled_struct_type,
6333 TYPE_UID (fo->restrict_pointed_type));
6334 rvi->is_restrict_var = 1;
6335 insert_vi_for_tree (heapvar, rvi);
6336 make_constraint_from (newvi, rvi->id);
6337 make_param_constraints (rvi);
6338 }
6339 if (i + 1 < fieldstack.length ())
6340 {
6341 varinfo_t tem = new_var_info (decl, name, false);
6342 newvi->next = tem->id;
6343 tem->head = vi->id;
6344 }
6345 }
6346
6347 return vi;
6348 }
6349
6350 static unsigned int
6351 create_variable_info_for (tree decl, const char *name, bool add_id)
6352 {
6353 /* First see if we are dealing with an ifunc resolver call and
6354 associate that with a call to the resolver function result. */
6355 cgraph_node *node;
6356 if (in_ipa_mode
6357 && TREE_CODE (decl) == FUNCTION_DECL
6358 && (node = cgraph_node::get (decl))
6359 && node->ifunc_resolver)
6360 {
6361 varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
6362 constraint_expr rhs
6363 = get_function_part_constraint (fi, fi_result);
6364 fi = new_var_info (NULL_TREE, "ifuncres", true);
6365 fi->is_reg_var = true;
6366 constraint_expr lhs;
6367 lhs.type = SCALAR;
6368 lhs.var = fi->id;
6369 lhs.offset = 0;
6370 process_constraint (new_constraint (lhs, rhs));
6371 insert_vi_for_tree (decl, fi);
6372 return fi->id;
6373 }
6374
6375 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6376 unsigned int id = vi->id;
6377
6378 insert_vi_for_tree (decl, vi);
6379
6380 if (!VAR_P (decl))
6381 return id;
6382
6383 /* Create initial constraints for globals. */
6384 for (; vi; vi = vi_next (vi))
6385 {
6386 if (!vi->may_have_pointers
6387 || !vi->is_global_var)
6388 continue;
6389
6390 /* Mark global restrict qualified pointers. */
6391 if ((POINTER_TYPE_P (TREE_TYPE (decl))
6392 && TYPE_RESTRICT (TREE_TYPE (decl)))
6393 || vi->only_restrict_pointers)
6394 {
6395 varinfo_t rvi
6396 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6397 true);
6398 /* ??? For now exclude reads from globals as restrict sources
6399 if those are not (indirectly) from incoming parameters. */
6400 rvi->is_restrict_var = false;
6401 continue;
6402 }
6403
6404 /* In non-IPA mode the initializer from nonlocal is all we need. */
6405 if (!in_ipa_mode
6406 || DECL_HARD_REGISTER (decl))
6407 make_copy_constraint (vi, nonlocal_id);
6408
6409 /* In IPA mode parse the initializer and generate proper constraints
6410 for it. */
6411 else
6412 {
6413 varpool_node *vnode = varpool_node::get (decl);
6414
6415 /* For escaped variables initialize them from nonlocal. */
6416 if (!vnode->all_refs_explicit_p ())
6417 make_copy_constraint (vi, nonlocal_id);
6418
6419 /* If this is a global variable with an initializer and we are in
6420 IPA mode generate constraints for it. */
6421 ipa_ref *ref;
6422 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6423 {
6424 auto_vec<ce_s> rhsc;
6425 struct constraint_expr lhs, *rhsp;
6426 unsigned i;
6427 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6428 lhs.var = vi->id;
6429 lhs.offset = 0;
6430 lhs.type = SCALAR;
6431 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6432 process_constraint (new_constraint (lhs, *rhsp));
6433 /* If this is a variable that escapes from the unit
6434 the initializer escapes as well. */
6435 if (!vnode->all_refs_explicit_p ())
6436 {
6437 lhs.var = escaped_id;
6438 lhs.offset = 0;
6439 lhs.type = SCALAR;
6440 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6441 process_constraint (new_constraint (lhs, *rhsp));
6442 }
6443 }
6444 }
6445 }
6446
6447 return id;
6448 }
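/* For example, in IPA mode the global initializer

     int x;
     int *p = &x;

   generates the constraint p = &x, and additionally ESCAPED = &x
   if not all of p's references are explicit in the unit.  */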
6449
6450 /* Print out the points-to solution for VAR to FILE. */
6451
6452 static void
6453 dump_solution_for_var (FILE *file, unsigned int var)
6454 {
6455 varinfo_t vi = get_varinfo (var);
6456 unsigned int i;
6457 bitmap_iterator bi;
6458
6459 /* Dump the solution for unified vars anyway; this avoids difficulties
6460 in scanning dumps in the testsuite. */
6461 fprintf (file, "%s = { ", vi->name);
6462 vi = get_varinfo (find (var));
6463 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6464 fprintf (file, "%s ", get_varinfo (i)->name);
6465 fprintf (file, "}");
6466
6467 /* But note when the variable was unified. */
6468 if (vi->id != var)
6469 fprintf (file, " same as %s", vi->name);
6470
6471 fprintf (file, "\n");
6472 }
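/* A typical line of dump output (with illustrative names) is

     p = { NONLOCAL x } same as q

   where the trailing clause appears only for unified variables.  */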
6473
6474 /* Print the points-to solution for VAR to stderr. */
6475
6476 DEBUG_FUNCTION void
6477 debug_solution_for_var (unsigned int var)
6478 {
6479 dump_solution_for_var (stderr, var);
6480 }
6481
6482 /* Register the constraints for function parameter related VI. */
6483
6484 static void
6485 make_param_constraints (varinfo_t vi)
6486 {
6487 for (; vi; vi = vi_next (vi))
6488 {
6489 if (vi->only_restrict_pointers)
6490 ;
6491 else if (vi->may_have_pointers)
6492 make_constraint_from (vi, nonlocal_id);
6493
6494 if (vi->is_full_var)
6495 break;
6496 }
6497 }
6498
6499 /* Create varinfo structures for all of the variables in the
6500 function for intraprocedural mode. */
6501
6502 static void
6503 intra_create_variable_infos (struct function *fn)
6504 {
6505 tree t;
6506 bitmap handled_struct_type = NULL;
6507 bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
6508
6509 /* For each incoming pointer argument arg, create the constraint ARG
6510 = NONLOCAL or a dummy variable if it is a restrict qualified
6511 passed-by-reference argument. */
6512 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6513 {
6514 if (handled_struct_type == NULL)
6515 handled_struct_type = BITMAP_ALLOC (NULL);
6516
6517 varinfo_t p
6518 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6519 handled_struct_type, this_parm_in_ctor);
6520 insert_vi_for_tree (t, p);
6521
6522 make_param_constraints (p);
6523
6524 this_parm_in_ctor = false;
6525 }
6526
6527 if (handled_struct_type != NULL)
6528 BITMAP_FREE (handled_struct_type);
6529
6530 /* Add a constraint for a result decl that is passed by reference. */
6531 if (DECL_RESULT (fn->decl)
6532 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6533 {
6534 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6535
6536 for (p = result_vi; p; p = vi_next (p))
6537 make_constraint_from (p, nonlocal_id);
6538 }
6539
6540 /* Add a constraint for the incoming static chain parameter. */
6541 if (fn->static_chain_decl != NULL_TREE)
6542 {
6543 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6544
6545 for (p = chain_vi; p; p = vi_next (p))
6546 make_constraint_from (p, nonlocal_id);
6547 }
6548 }
6549
6550 /* Structure used to put solution bitmaps in a hashtable so they can
6551 be shared among variables with the same points-to set. */
6552
6553 typedef struct shared_bitmap_info
6554 {
6555 bitmap pt_vars;
6556 hashval_t hashcode;
6557 } *shared_bitmap_info_t;
6558 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6559
6560 /* Shared_bitmap hashtable helpers. */
6561
6562 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6563 {
6564 static inline hashval_t hash (const shared_bitmap_info *);
6565 static inline bool equal (const shared_bitmap_info *,
6566 const shared_bitmap_info *);
6567 };
6568
6569 /* Hash function for a shared_bitmap_info_t */
6570
6571 inline hashval_t
6572 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6573 {
6574 return bi->hashcode;
6575 }
6576
6577 /* Equality function for two shared_bitmap_info_t's. */
6578
6579 inline bool
6580 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6581 const shared_bitmap_info *sbi2)
6582 {
6583 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6584 }
6585
6586 /* Shared_bitmap hashtable. */
6587
6588 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6589
6590 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6591 existing instance if there is one, NULL otherwise. */
6592
6593 static bitmap
6594 shared_bitmap_lookup (bitmap pt_vars)
6595 {
6596 shared_bitmap_info **slot;
6597 struct shared_bitmap_info sbi;
6598
6599 sbi.pt_vars = pt_vars;
6600 sbi.hashcode = bitmap_hash (pt_vars);
6601
6602 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6603 if (!slot)
6604 return NULL;
6605 else
6606 return (*slot)->pt_vars;
6607 }
6608
6609
6610 /* Add a bitmap to the shared bitmap hashtable. */
6611
6612 static void
6613 shared_bitmap_add (bitmap pt_vars)
6614 {
6615 shared_bitmap_info **slot;
6616 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6617
6618 sbi->pt_vars = pt_vars;
6619 sbi->hashcode = bitmap_hash (pt_vars);
6620
6621 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6622 gcc_assert (!*slot);
6623 *slot = sbi;
6624 }
6625
6626
6627 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6628
6629 static void
6630 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6631 tree fndecl)
6632 {
6633 unsigned int i;
6634 bitmap_iterator bi;
6635 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6636 bool everything_escaped
6637 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6638
6639 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6640 {
6641 varinfo_t vi = get_varinfo (i);
6642
6643 if (vi->is_artificial_var)
6644 continue;
6645
6646 if (everything_escaped
6647 || (escaped_vi->solution
6648 && bitmap_bit_p (escaped_vi->solution, i)))
6649 {
6650 pt->vars_contains_escaped = true;
6651 pt->vars_contains_escaped_heap |= vi->is_heap_var;
6652 }
6653
6654 if (vi->is_restrict_var)
6655 pt->vars_contains_restrict = true;
6656
6657 if (VAR_P (vi->decl)
6658 || TREE_CODE (vi->decl) == PARM_DECL
6659 || TREE_CODE (vi->decl) == RESULT_DECL)
6660 {
6661 /* If we are in IPA mode we will not recompute points-to
6662 sets after inlining so make sure they stay valid. */
6663 if (in_ipa_mode
6664 && !DECL_PT_UID_SET_P (vi->decl))
6665 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6666
6667 /* Add the decl to the points-to set. Note that the points-to
6668 set contains global variables. */
6669 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6670 if (vi->is_global_var
6671 /* In IPA mode the escaped_heap trick doesn't work as
6672 ESCAPED is escaped from the unit but
6673 pt_solution_includes_global needs to answer true for
6674 all variables not automatic within a function.
6675 For the same reason is_global_var is not the
6676 correct flag to track - local variables from other
6677 functions also need to be considered global.
6678 Conveniently all HEAP vars are not put in function
6679 scope. */
6680 || (in_ipa_mode
6681 && fndecl
6682 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6683 pt->vars_contains_nonlocal = true;
6684
6685 /* If we have a variable that is interposable record that fact
6686 for pointer comparison simplification. */
6687 if (VAR_P (vi->decl)
6688 && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
6689 && ! decl_binds_to_current_def_p (vi->decl))
6690 pt->vars_contains_interposable = true;
6691
6692 /* If this is a local variable we can have overlapping lifetimes
6693 of different function invocations through recursion; duplicate
6694 it with its shadow variable. */
6695 if (in_ipa_mode
6696 && vi->shadow_var_uid != 0)
6697 {
6698 bitmap_set_bit (into, vi->shadow_var_uid);
6699 pt->vars_contains_nonlocal = true;
6700 }
6701 }
6702
6703 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6704 || TREE_CODE (vi->decl) == LABEL_DECL)
6705 {
6706 /* Nothing should read/write from/to code so we can
6707 save bits by not including them in the points-to bitmaps.
6708 Still mark the points-to set as containing global memory
6709 to make code-patching possible - see PR70128. */
6710 pt->vars_contains_nonlocal = true;
6711 }
6712 }
6713 }
6714
6715
6716 /* Compute and return the points-to solution for the variable ORIG_VI. */
6717
6718 static struct pt_solution
6719 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6720 {
6721 unsigned int i;
6722 bitmap_iterator bi;
6723 bitmap finished_solution;
6724 bitmap result;
6725 varinfo_t vi;
6726 struct pt_solution *pt;
6727
6728 /* This variable may have been collapsed, let's get the real
6729 variable. */
6730 vi = get_varinfo (find (orig_vi->id));
6731
6732 /* See if we have already computed the solution and return it. */
6733 pt_solution **slot = &final_solutions->get_or_insert (vi);
6734 if (*slot != NULL)
6735 return **slot;
6736
6737 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6738 memset (pt, 0, sizeof (struct pt_solution));
6739
6740 /* Translate artificial variables into SSA_NAME_PTR_INFO
6741 attributes. */
6742 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6743 {
6744 varinfo_t vi = get_varinfo (i);
6745
6746 if (vi->is_artificial_var)
6747 {
6748 if (vi->id == nothing_id)
6749 pt->null = 1;
6750 else if (vi->id == escaped_id)
6751 {
6752 if (in_ipa_mode)
6753 pt->ipa_escaped = 1;
6754 else
6755 pt->escaped = 1;
6756 /* Expand some special vars of ESCAPED in-place here. */
6757 varinfo_t evi = get_varinfo (find (escaped_id));
6758 if (bitmap_bit_p (evi->solution, nonlocal_id))
6759 pt->nonlocal = 1;
6760 }
6761 else if (vi->id == nonlocal_id)
6762 pt->nonlocal = 1;
6763 else if (vi->id == string_id)
6764 /* Nobody cares - STRING_CSTs are read-only entities. */
6765 ;
6766 else if (vi->id == anything_id
6767 || vi->id == integer_id)
6768 pt->anything = 1;
6769 }
6770 }
6771
6772 /* Instead of doing extra work, simply do not create
6773 elaborate points-to information for pt_anything pointers. */
6774 if (pt->anything)
6775 return *pt;
6776
6777 /* Share the final set of variables when possible. */
6778 finished_solution = BITMAP_GGC_ALLOC ();
6779 stats.points_to_sets_created++;
6780
6781 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6782 result = shared_bitmap_lookup (finished_solution);
6783 if (!result)
6784 {
6785 shared_bitmap_add (finished_solution);
6786 pt->vars = finished_solution;
6787 }
6788 else
6789 {
6790 pt->vars = result;
6791 bitmap_clear (finished_solution);
6792 }
6793
6794 return *pt;
6795 }
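/* For example, a solution bitmap containing NONLOCAL and a variable
   x translates to pt->nonlocal = 1 with pt->vars = { x }, while a
   solution containing ANYTHING short-circuits to pt->anything = 1
   without building a vars bitmap.  */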
6796
6797 /* Given a pointer variable P, fill in its points-to set. */
6798
6799 static void
6800 find_what_p_points_to (tree fndecl, tree p)
6801 {
6802 struct ptr_info_def *pi;
6803 tree lookup_p = p;
6804 varinfo_t vi;
6805 value_range vr;
6806 get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (vr, p);
6807 bool nonnull = vr.nonzero_p ();
6808
6809 /* For parameters, get at the points-to set for the actual parm
6810 decl. */
6811 if (TREE_CODE (p) == SSA_NAME
6812 && SSA_NAME_IS_DEFAULT_DEF (p)
6813 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6814 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6815 lookup_p = SSA_NAME_VAR (p);
6816
6817 vi = lookup_vi_for_tree (lookup_p);
6818 if (!vi)
6819 return;
6820
6821 pi = get_ptr_info (p);
6822 pi->pt = find_what_var_points_to (fndecl, vi);
6823 /* Conservatively set the points-to-NULL flag from PTA to true. */
6824 pi->pt.null = 1;
6825 /* Preserve pointer nonnull globally computed. */
6826 if (nonnull)
6827 set_ptr_nonnull (p);
6828 }
6829
6830
6831 /* Query statistics for points-to solutions. */
6832
6833 static struct {
6834 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6835 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6836 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6837 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6838 } pta_stats;
6839
6840 void
6841 dump_pta_stats (FILE *s)
6842 {
6843 fprintf (s, "\nPTA query stats:\n");
6844 fprintf (s, " pt_solution_includes: "
6845 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6846 HOST_WIDE_INT_PRINT_DEC" queries\n",
6847 pta_stats.pt_solution_includes_no_alias,
6848 pta_stats.pt_solution_includes_no_alias
6849 + pta_stats.pt_solution_includes_may_alias);
6850 fprintf (s, " pt_solutions_intersect: "
6851 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6852 HOST_WIDE_INT_PRINT_DEC" queries\n",
6853 pta_stats.pt_solutions_intersect_no_alias,
6854 pta_stats.pt_solutions_intersect_no_alias
6855 + pta_stats.pt_solutions_intersect_may_alias);
6856 }
6857
6858
6859 /* Reset the points-to solution *PT to a conservative default
6860 (point to anything). */
6861
6862 void
6863 pt_solution_reset (struct pt_solution *pt)
6864 {
6865 memset (pt, 0, sizeof (struct pt_solution));
6866 pt->anything = true;
6867 pt->null = true;
6868 }
6869
6870 /* Set the points-to solution *PT to point only to the variables
6871 in VARS. VARS_CONTAINS_NONLOCAL specifies whether VARS contains
6872 nonlocal (global) variables. */
6874
6875 void
6876 pt_solution_set (struct pt_solution *pt, bitmap vars,
6877 bool vars_contains_nonlocal)
6878 {
6879 memset (pt, 0, sizeof (struct pt_solution));
6880 pt->vars = vars;
6881 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6882 pt->vars_contains_escaped
6883 = (cfun->gimple_df->escaped.anything
6884 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6885 }
6886
6887 /* Set the points-to solution *PT to point only to the variable VAR. */
6888
6889 void
6890 pt_solution_set_var (struct pt_solution *pt, tree var)
6891 {
6892 memset (pt, 0, sizeof (struct pt_solution));
6893 pt->vars = BITMAP_GGC_ALLOC ();
6894 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6895 pt->vars_contains_nonlocal = is_global_var (var);
6896 pt->vars_contains_escaped
6897 = (cfun->gimple_df->escaped.anything
6898 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6899 }
6900
6901 /* Computes the union of the points-to solutions *DEST and *SRC and
6902 stores the result in *DEST. This changes the points-to bitmap
6903 of *DEST and thus may not be used if that might be shared.
6904 The points-to bitmap of *SRC and *DEST will not be shared after
6905 this function if they were not before. */
6906
6907 static void
6908 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6909 {
6910 dest->anything |= src->anything;
6911 if (dest->anything)
6912 {
6913 pt_solution_reset (dest);
6914 return;
6915 }
6916
6917 dest->nonlocal |= src->nonlocal;
6918 dest->escaped |= src->escaped;
6919 dest->ipa_escaped |= src->ipa_escaped;
6920 dest->null |= src->null;
6921 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6922 dest->vars_contains_escaped |= src->vars_contains_escaped;
6923 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6924 if (!src->vars)
6925 return;
6926
6927 if (!dest->vars)
6928 dest->vars = BITMAP_GGC_ALLOC ();
6929 bitmap_ior_into (dest->vars, src->vars);
6930 }
6931
6932 /* Return true if the points-to solution *PT is empty. */
6933
6934 bool
6935 pt_solution_empty_p (const pt_solution *pt)
6936 {
6937 if (pt->anything
6938 || pt->nonlocal)
6939 return false;
6940
6941 if (pt->vars
6942 && !bitmap_empty_p (pt->vars))
6943 return false;
6944
6945 /* If the solution includes ESCAPED, check if that is empty. */
6946 if (pt->escaped
6947 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6948 return false;
6949
6950 /* Likewise for the IPA ESCAPED solution. */
6951 if (pt->ipa_escaped
6952 && !pt_solution_empty_p (&ipa_escaped_pt))
6953 return false;
6954
6955 return true;
6956 }
6957
6958 /* Return true if the points-to solution *PT only points to a single
6959 var (and possibly NULL), and return the var uid in *UID. */
6960
6961 bool
6962 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6963 {
6964 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6965 || pt->vars == NULL
6966 || !bitmap_single_bit_set_p (pt->vars))
6967 return false;
6968
6969 *uid = bitmap_first_set_bit (pt->vars);
6970 return true;
6971 }
6972
6973 /* Return true if the points-to solution *PT includes global memory.
6974 If ESCAPED_LOCAL_P is true then escaped local variables are also
6975 considered global. */
6976
6977 bool
6978 pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p)
6979 {
6980 if (pt->anything
6981 || pt->nonlocal
6982 || pt->vars_contains_nonlocal
6983 /* The following is a hack to make the malloc escape hack work.
6984 In reality we'd need different sets for escaped-through-return
6985 and escaped-to-callees and passes would need to be updated. */
6986 || pt->vars_contains_escaped_heap)
6987 return true;
6988
6989 if (escaped_local_p && pt->vars_contains_escaped)
6990 return true;
6991
6992 /* 'escaped' is also a placeholder so we have to look into it. */
6993 if (pt->escaped)
6994 return pt_solution_includes_global (&cfun->gimple_df->escaped,
6995 escaped_local_p);
6996
6997 if (pt->ipa_escaped)
6998 return pt_solution_includes_global (&ipa_escaped_pt,
6999 escaped_local_p);
7000
7001 return false;
7002 }
7003
7004 /* Return true if the points-to solution *PT includes the variable
7005 declaration DECL. */
7006
7007 static bool
7008 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
7009 {
7010 if (pt->anything)
7011 return true;
7012
7013 if (pt->nonlocal
7014 && is_global_var (decl))
7015 return true;
7016
7017 if (pt->vars
7018 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
7019 return true;
7020
7021 /* If the solution includes ESCAPED, check it. */
7022 if (pt->escaped
7023 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
7024 return true;
7025
7026 /* Likewise for the IPA ESCAPED solution. */
7027 if (pt->ipa_escaped
7028 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
7029 return true;
7030
7031 return false;
7032 }
7033
7034 bool
7035 pt_solution_includes (struct pt_solution *pt, const_tree decl)
7036 {
7037 bool res = pt_solution_includes_1 (pt, decl);
7038 if (res)
7039 ++pta_stats.pt_solution_includes_may_alias;
7040 else
7041 ++pta_stats.pt_solution_includes_no_alias;
7042 return res;
7043 }
7044
7045 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
7046 intersection. */
7047
7048 static bool
7049 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
7050 {
7051 if (pt1->anything || pt2->anything)
7052 return true;
7053
7054 /* If either points to unknown global memory and the other points to
7055 any global memory they alias. */
7056 if ((pt1->nonlocal
7057 && (pt2->nonlocal
7058 || pt2->vars_contains_nonlocal))
7059 || (pt2->nonlocal
7060 && pt1->vars_contains_nonlocal))
7061 return true;
7062
7063 /* If either points to all escaped memory and the other points to
7064 any escaped memory they alias. */
7065 if ((pt1->escaped
7066 && (pt2->escaped
7067 || pt2->vars_contains_escaped))
7068 || (pt2->escaped
7069 && pt1->vars_contains_escaped))
7070 return true;
7071
7072 /* Check the escaped solution if required.
7073 ??? Do we need to check the local against the IPA escaped sets? */
7074 if ((pt1->ipa_escaped || pt2->ipa_escaped)
7075 && !pt_solution_empty_p (&ipa_escaped_pt))
7076 {
7077 /* If both point to escaped memory and that solution
7078 is not empty they alias. */
7079 if (pt1->ipa_escaped && pt2->ipa_escaped)
7080 return true;
7081
7082 /* If either points to escaped memory see if the escaped solution
7083 intersects with the other. */
7084 if ((pt1->ipa_escaped
7085 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
7086 || (pt2->ipa_escaped
7087 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
7088 return true;
7089 }
7090
7091 /* Now both pointers alias if their points-to solution intersects. */
7092 return (pt1->vars
7093 && pt2->vars
7094 && bitmap_intersect_p (pt1->vars, pt2->vars));
7095 }
7096
7097 bool
7098 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
7099 {
7100 bool res = pt_solutions_intersect_1 (pt1, pt2);
7101 if (res)
7102 ++pta_stats.pt_solutions_intersect_may_alias;
7103 else
7104 ++pta_stats.pt_solutions_intersect_no_alias;
7105 return res;
7106 }
7107
7108
7109 /* Dump points-to information to OUTFILE. */
7110
7111 static void
7112 dump_sa_points_to_info (FILE *outfile)
7113 {
7114 unsigned int i;
7115
7116 fprintf (outfile, "\nPoints-to sets\n\n");
7117
7118 if (dump_flags & TDF_STATS)
7119 {
7120 fprintf (outfile, "Stats:\n");
7121 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
7122 fprintf (outfile, "Non-pointer vars: %d\n",
7123 stats.nonpointer_vars);
7124 fprintf (outfile, "Statically unified vars: %d\n",
7125 stats.unified_vars_static);
7126 fprintf (outfile, "Dynamically unified vars: %d\n",
7127 stats.unified_vars_dynamic);
7128 fprintf (outfile, "Iterations: %d\n", stats.iterations);
7129 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
7130 fprintf (outfile, "Number of implicit edges: %d\n",
7131 stats.num_implicit_edges);
7132 }
7133
7134 for (i = 1; i < varmap.length (); i++)
7135 {
7136 varinfo_t vi = get_varinfo (i);
7137 if (!vi->may_have_pointers)
7138 continue;
7139 dump_solution_for_var (outfile, i);
7140 }
7141 }
7142
7143
7144 /* Debug points-to information to stderr. */
7145
7146 DEBUG_FUNCTION void
7147 debug_sa_points_to_info (void)
7148 {
7149 dump_sa_points_to_info (stderr);
7150 }
7151
7152
7153 /* Initialize the always-existing constraint variables NULL, ANYTHING,
7154 STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER. */
7155
7156 static void
7157 init_base_vars (void)
7158 {
7159 struct constraint_expr lhs, rhs;
7160 varinfo_t var_anything;
7161 varinfo_t var_nothing;
7162 varinfo_t var_string;
7163 varinfo_t var_escaped;
7164 varinfo_t var_nonlocal;
7165 varinfo_t var_storedanything;
7166 varinfo_t var_integer;
7167
7168 /* Variable ID zero is reserved and should be NULL. */
7169 varmap.safe_push (NULL);
7170
7171 /* Create the NULL variable, used to represent that a variable points
7172 to NULL. */
7173 var_nothing = new_var_info (NULL_TREE, "NULL", false);
7174 gcc_assert (var_nothing->id == nothing_id);
7175 var_nothing->is_artificial_var = 1;
7176 var_nothing->offset = 0;
7177 var_nothing->size = ~0;
7178 var_nothing->fullsize = ~0;
7179 var_nothing->is_special_var = 1;
7180 var_nothing->may_have_pointers = 0;
7181 var_nothing->is_global_var = 0;
7182
7183 /* Create the ANYTHING variable, used to represent that a variable
7184 points to some unknown piece of memory. */
7185 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
7186 gcc_assert (var_anything->id == anything_id);
7187 var_anything->is_artificial_var = 1;
7188 var_anything->size = ~0;
7189 var_anything->offset = 0;
7190 var_anything->fullsize = ~0;
7191 var_anything->is_special_var = 1;
7192
7193 /* Anything points to anything. This makes deref constraints just
7194 work in the presence of linked lists and other p = *p type loops,
7195 by saying that *ANYTHING = ANYTHING. */
7196 lhs.type = SCALAR;
7197 lhs.var = anything_id;
7198 lhs.offset = 0;
7199 rhs.type = ADDRESSOF;
7200 rhs.var = anything_id;
7201 rhs.offset = 0;
7202
7203 /* This specifically does not use process_constraint because
7204 process_constraint ignores all anything = anything constraints, since all
7205 but this one are redundant. */
7206 constraints.safe_push (new_constraint (lhs, rhs));
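/* Author-added illustration (hedged): a typical pattern this handles is

     struct node { struct node *next; };
     struct node *p = head;   // HEAD is some unknown list
     while (p)
       p = p->next;           // effectively p = *p

   Once P's solution contains ANYTHING, the deref constraint p = *p is
   already saturated because *ANYTHING includes ANYTHING.  */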
7207
7208 /* Create the STRING variable, used to represent that a variable
7209 points to a string literal. String literals don't contain
7210 pointers so STRING doesn't point to anything. */
7211 var_string = new_var_info (NULL_TREE, "STRING", false);
7212 gcc_assert (var_string->id == string_id);
7213 var_string->is_artificial_var = 1;
7214 var_string->offset = 0;
7215 var_string->size = ~0;
7216 var_string->fullsize = ~0;
7217 var_string->is_special_var = 1;
7218 var_string->may_have_pointers = 0;
7219
7220 /* Create the ESCAPED variable, used to represent the set of escaped
7221 memory. */
7222 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
7223 gcc_assert (var_escaped->id == escaped_id);
7224 var_escaped->is_artificial_var = 1;
7225 var_escaped->offset = 0;
7226 var_escaped->size = ~0;
7227 var_escaped->fullsize = ~0;
7228 var_escaped->is_special_var = 0;
7229
7230 /* Create the NONLOCAL variable, used to represent the set of nonlocal
7231 memory. */
7232 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
7233 gcc_assert (var_nonlocal->id == nonlocal_id);
7234 var_nonlocal->is_artificial_var = 1;
7235 var_nonlocal->offset = 0;
7236 var_nonlocal->size = ~0;
7237 var_nonlocal->fullsize = ~0;
7238 var_nonlocal->is_special_var = 1;
7239
7240 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
7241 lhs.type = SCALAR;
7242 lhs.var = escaped_id;
7243 lhs.offset = 0;
7244 rhs.type = DEREF;
7245 rhs.var = escaped_id;
7246 rhs.offset = 0;
7247 process_constraint (new_constraint (lhs, rhs));
7248
7249 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
7250 whole variable escapes. */
7251 lhs.type = SCALAR;
7252 lhs.var = escaped_id;
7253 lhs.offset = 0;
7254 rhs.type = SCALAR;
7255 rhs.var = escaped_id;
7256 rhs.offset = UNKNOWN_OFFSET;
7257 process_constraint (new_constraint (lhs, rhs));
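/* Hedged illustration: given

     struct S { int f; int g; } s;
     escape (&s.g);   // ESCAPE is a hypothetical call leaking &s.g

   this self-constraint with UNKNOWN_OFFSET extends ESCAPED from the
   field for S.G to all fields of S, so S.F is treated as escaped
   too.  */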
7258
7259 /* *ESCAPED = NONLOCAL. This is true because we have to assume
7260 everything pointed to by escaped points to what global memory can
7261 point to. */
7262 lhs.type = DEREF;
7263 lhs.var = escaped_id;
7264 lhs.offset = 0;
7265 rhs.type = SCALAR;
7266 rhs.var = nonlocal_id;
7267 rhs.offset = 0;
7268 process_constraint (new_constraint (lhs, rhs));
7269
7270 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
7271 global memory may point to global memory and escaped memory. */
7272 lhs.type = SCALAR;
7273 lhs.var = nonlocal_id;
7274 lhs.offset = 0;
7275 rhs.type = ADDRESSOF;
7276 rhs.var = nonlocal_id;
7277 rhs.offset = 0;
7278 process_constraint (new_constraint (lhs, rhs));
7279 rhs.type = ADDRESSOF;
7280 rhs.var = escaped_id;
7281 rhs.offset = 0;
7282 process_constraint (new_constraint (lhs, rhs));
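/* Author-added sketch of the model behind these constraints:

     int *g;                       // global, thus NONLOCAL memory
     void f (int *p) { g = p; }    // P's pointees become ESCAPED

   A later load q = g may then yield escaped memory, which is exactly
   NONLOCAL = &ESCAPED; globals pointing to other globals is covered
   by NONLOCAL = &NONLOCAL.  */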
7283
7284 /* Create the STOREDANYTHING variable, used to represent the set of
7285 variables stored to *ANYTHING. */
7286 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
7287 gcc_assert (var_storedanything->id == storedanything_id);
7288 var_storedanything->is_artificial_var = 1;
7289 var_storedanything->offset = 0;
7290 var_storedanything->size = ~0;
7291 var_storedanything->fullsize = ~0;
7292 var_storedanything->is_special_var = 0;
7293
7294 /* Create the INTEGER variable, used to represent that a variable points
7295 to what an INTEGER "points to". */
7296 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
7297 gcc_assert (var_integer->id == integer_id);
7298 var_integer->is_artificial_var = 1;
7299 var_integer->size = ~0;
7300 var_integer->fullsize = ~0;
7301 var_integer->offset = 0;
7302 var_integer->is_special_var = 1;
7303
7304 /* INTEGER = ANYTHING, because we don't know where a dereference of
7305 a random integer will point to. */
7306 lhs.type = SCALAR;
7307 lhs.var = integer_id;
7308 lhs.offset = 0;
7309 rhs.type = ADDRESSOF;
7310 rhs.var = anything_id;
7311 rhs.offset = 0;
7312 process_constraint (new_constraint (lhs, rhs));
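/* Illustration only (GET_RANDOM_INTEGER is a hypothetical helper):

     int *p = (int *) get_random_integer ();
     int x = *p;   // may read any object whatsoever

   A pointer materialized from an arbitrary integer can point
   anywhere, hence INTEGER points to ANYTHING.  */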
7313 }
7314
7315 /* Initialize things necessary to perform PTA. */
7316
7317 static void
7318 init_alias_vars (void)
7319 {
7320 use_field_sensitive = (param_max_fields_for_field_sensitive > 1);
7321
7322 bitmap_obstack_initialize (&pta_obstack);
7323 bitmap_obstack_initialize (&oldpta_obstack);
7324 bitmap_obstack_initialize (&predbitmap_obstack);
7325
7326 constraints.create (8);
7327 varmap.create (8);
7328 vi_for_tree = new hash_map<tree, varinfo_t>;
7329 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
7330
7331 memset (&stats, 0, sizeof (stats));
7332 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
7333 init_base_vars ();
7334
7335 gcc_obstack_init (&fake_var_decl_obstack);
7336
7337 final_solutions = new hash_map<varinfo_t, pt_solution *>;
7338 gcc_obstack_init (&final_solutions_obstack);
7339 }
7340
7341 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7342 predecessor edges. */
7343
7344 static void
7345 remove_preds_and_fake_succs (constraint_graph_t graph)
7346 {
7347 unsigned int i;
7348
7349 /* Clear the implicit ref and address nodes from the successor
7350 lists. */
7351 for (i = 1; i < FIRST_REF_NODE; i++)
7352 {
7353 if (graph->succs[i])
7354 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7355 FIRST_REF_NODE * 2);
7356 }
7357
7358 /* Free the successor list for the non-ref nodes. */
7359 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7360 {
7361 if (graph->succs[i])
7362 BITMAP_FREE (graph->succs[i]);
7363 }
7364
7365 /* Now shrink the successor list to the current number of variables
7366 and blow away the predecessor bitmaps. */
7367 graph->size = varmap.length ();
7368 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7369
7370 free (graph->implicit_preds);
7371 graph->implicit_preds = NULL;
7372 free (graph->preds);
7373 graph->preds = NULL;
7374 bitmap_obstack_release (&predbitmap_obstack);
7375 }
7376
7377 /* Solve the constraint set. */
7378
7379 static void
7380 solve_constraints (void)
7381 {
7382 class scc_info *si;
7383
7384 /* Sort varinfos so that ones that cannot be pointed to are last.
7385 This makes bitmaps more efficient. */
7386 unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
7387 for (unsigned i = 0; i < integer_id + 1; ++i)
7388 map[i] = i;
7389 /* Start with address-taken vars, followed by not address-taken vars
7390 to move vars never appearing in the points-to solution bitmaps last. */
7391 unsigned j = integer_id + 1;
7392 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7393 if (varmap[varmap[i]->head]->address_taken)
7394 map[i] = j++;
7395 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7396 if (! varmap[varmap[i]->head]->address_taken)
7397 map[i] = j++;
7398 /* Shuffle varmap according to map. */
7399 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7400 {
7401 while (map[varmap[i]->id] != i)
7402 std::swap (varmap[i], varmap[map[varmap[i]->id]]);
7403 gcc_assert (bitmap_empty_p (varmap[i]->solution));
7404 varmap[i]->id = i;
7405 varmap[i]->next = map[varmap[i]->next];
7406 varmap[i]->head = map[varmap[i]->head];
7407 }
7408 /* Finally rewrite constraints. */
7409 for (unsigned i = 0; i < constraints.length (); ++i)
7410 {
7411 constraints[i]->lhs.var = map[constraints[i]->lhs.var];
7412 constraints[i]->rhs.var = map[constraints[i]->rhs.var];
7413 }
7414 free (map);
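/* Author-added note on the shuffle above: MAP sends old variable ids
   to new positions, and the inner while-loop swaps varmap[i] with the
   slot its current element belongs in until slot I holds the right
   element. E.g. for a (simplified) map = {0, 2, 1} the loop swaps
   elements 1 and 2 once and every later check exits immediately.  */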
7415
7416 if (dump_file)
7417 fprintf (dump_file,
7418 "\nCollapsing static cycles and doing variable "
7419 "substitution\n");
7420
7421 init_graph (varmap.length () * 2);
7422
7423 if (dump_file)
7424 fprintf (dump_file, "Building predecessor graph\n");
7425 build_pred_graph ();
7426
7427 if (dump_file)
7428 fprintf (dump_file, "Detecting pointer and location "
7429 "equivalences\n");
7430 si = perform_var_substitution (graph);
7431
7432 if (dump_file)
7433 fprintf (dump_file, "Rewriting constraints and unifying "
7434 "variables\n");
7435 rewrite_constraints (graph, si);
7436
7437 build_succ_graph ();
7438
7439 free_var_substitution_info (si);
7440
7441 /* Attach complex constraints to graph nodes. */
7442 move_complex_constraints (graph);
7443
7444 if (dump_file)
7445 fprintf (dump_file, "Uniting pointer but not location equivalent "
7446 "variables\n");
7447 unite_pointer_equivalences (graph);
7448
7449 if (dump_file)
7450 fprintf (dump_file, "Finding indirect cycles\n");
7451 find_indirect_cycles (graph);
7452
7453 /* Implicit nodes and predecessors are no longer necessary at this
7454 point. */
7455 remove_preds_and_fake_succs (graph);
7456
7457 if (dump_file && (dump_flags & TDF_GRAPH))
7458 {
7459 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7460 "in dot format:\n");
7461 dump_constraint_graph (dump_file);
7462 fprintf (dump_file, "\n\n");
7463 }
7464
7465 if (dump_file)
7466 fprintf (dump_file, "Solving graph\n");
7467
7468 solve_graph (graph);
7469
7470 if (dump_file && (dump_flags & TDF_GRAPH))
7471 {
7472 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7473 "in dot format:\n");
7474 dump_constraint_graph (dump_file);
7475 fprintf (dump_file, "\n\n");
7476 }
7477 }
7478
7479 /* Create points-to sets for the current function. See the comments
7480 at the start of the file for an algorithmic overview. */
7481
7482 static void
7483 compute_points_to_sets (void)
7484 {
7485 basic_block bb;
7486 varinfo_t vi;
7487
7488 timevar_push (TV_TREE_PTA);
7489
7490 init_alias_vars ();
7491
7492 intra_create_variable_infos (cfun);
7493
7494 /* Now walk all statements and build the constraint set. */
7495 FOR_EACH_BB_FN (bb, cfun)
7496 {
7497 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7498 gsi_next (&gsi))
7499 {
7500 gphi *phi = gsi.phi ();
7501
7502 if (! virtual_operand_p (gimple_phi_result (phi)))
7503 find_func_aliases (cfun, phi);
7504 }
7505
7506 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7507 gsi_next (&gsi))
7508 {
7509 gimple *stmt = gsi_stmt (gsi);
7510
7511 find_func_aliases (cfun, stmt);
7512 }
7513 }
7514
7515 if (dump_file)
7516 {
7517 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7518 dump_constraints (dump_file, 0);
7519 }
7520
7521 /* From the constraints compute the points-to sets. */
7522 solve_constraints ();
7523
7524 /* Post-process solutions for escapes through returns. */
7525 edge_iterator ei;
7526 edge e;
7527 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
7528 if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
7529 {
7530 tree val = gimple_return_retval (ret);
7531 /* ??? Easy to handle simple indirections with some work.
7532 Arbitrary references like foo.bar.baz are more difficult
7533 (but conservatively easy enough with just looking at the base).
7534 Mind to fixup find_func_aliases as well. */
7535 if (!val || !SSA_VAR_P (val))
7536 continue;
7537 /* Returns happen last in non-IPA mode, so they only influence
7538 the ESCAPED solution and we can filter local variables. */
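/* Hedged illustration of what this post-processing catches:

     int *f (void)
     {
       static int s;
       return &s;   // S becomes reachable by every caller
     }

   The solution of the returned value is merged into ESCAPED,
   filtered to global and heap variables, and then closed
   transitively by the loop below.  */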
7539 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
7540 varinfo_t vi = lookup_vi_for_tree (val);
7541 bitmap delta = BITMAP_ALLOC (&pta_obstack);
7542 bitmap_iterator bi;
7543 unsigned i;
7544 for (; vi; vi = vi_next (vi))
7545 {
7546 varinfo_t part_vi = get_varinfo (find (vi->id));
7547 EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
7548 escaped_vi->solution, 0, i, bi)
7549 {
7550 varinfo_t pointed_to_vi = get_varinfo (i);
7551 if (pointed_to_vi->is_global_var
7552 /* We delay marking of heap memory as global. */
7553 || pointed_to_vi->is_heap_var)
7554 bitmap_set_bit (delta, i);
7555 }
7556 }
7557
7558 /* Now compute the transitive closure. */
7559 bitmap_ior_into (escaped_vi->solution, delta);
7560 bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
7561 while (!bitmap_empty_p (delta))
7562 {
7563 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
7564 {
7565 varinfo_t pointed_to_vi = get_varinfo (i);
7566 pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
7567 unsigned j;
7568 bitmap_iterator bi2;
7569 EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
7570 escaped_vi->solution,
7571 0, j, bi2)
7572 {
7573 varinfo_t pointed_to_vi2 = get_varinfo (j);
7574 if (pointed_to_vi2->is_global_var
7575 /* We delay marking of heap memory as global. */
7576 || pointed_to_vi2->is_heap_var)
7577 bitmap_set_bit (new_delta, j);
7578 }
7579 }
7580 bitmap_ior_into (escaped_vi->solution, new_delta);
7581 bitmap_clear (delta);
7582 std::swap (delta, new_delta);
7583 }
7584 BITMAP_FREE (delta);
7585 BITMAP_FREE (new_delta);
7586 }
7587
7588 if (dump_file)
7589 dump_sa_points_to_info (dump_file);
7590
7591 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7592 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7593 get_varinfo (escaped_id));
7594
7595 /* Make sure the ESCAPED solution (which is used as placeholder in
7596 other solutions) does not reference itself. This simplifies
7597 points-to solution queries. */
7598 cfun->gimple_df->escaped.escaped = 0;
7599
7600 /* Compute the points-to sets for pointer SSA_NAMEs. */
7601 unsigned i;
7602 tree ptr;
7603
7604 FOR_EACH_SSA_NAME (i, ptr, cfun)
7605 {
7606 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
7607 find_what_p_points_to (cfun->decl, ptr);
7608 }
7609
7610 /* Compute the call-used/clobbered sets. */
7611 FOR_EACH_BB_FN (bb, cfun)
7612 {
7613 gimple_stmt_iterator gsi;
7614
7615 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7616 {
7617 gcall *stmt;
7618 struct pt_solution *pt;
7619
7620 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7621 if (!stmt)
7622 continue;
7623
7624 pt = gimple_call_use_set (stmt);
7625 if (gimple_call_flags (stmt) & ECF_CONST)
7626 memset (pt, 0, sizeof (struct pt_solution));
7627 else
7628 {
7629 bool uses_global_memory = true;
7630 bool reads_global_memory = true;
7631
7632 determine_global_memory_access (stmt, NULL,
7633 &reads_global_memory,
7634 &uses_global_memory);
7635 if ((vi = lookup_call_use_vi (stmt)) != NULL)
7636 {
7637 *pt = find_what_var_points_to (cfun->decl, vi);
7638 /* Escaped (and thus nonlocal) variables are always
7639 implicitly used by calls. */
7640 /* ??? ESCAPED can be empty even though NONLOCAL
7641 always escapes. */
7642 if (uses_global_memory)
7643 {
7644 pt->nonlocal = 1;
7645 pt->escaped = 1;
7646 }
7647 }
7648 else if (uses_global_memory)
7649 {
7650 /* If there is nothing special about this call then
7651 we have made everything that is used also escape. */
7652 *pt = cfun->gimple_df->escaped;
7653 pt->nonlocal = 1;
7654 }
7655 else
7656 memset (pt, 0, sizeof (struct pt_solution));
7657 }
7658
7659 pt = gimple_call_clobber_set (stmt);
7660 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7661 memset (pt, 0, sizeof (struct pt_solution));
7662 else
7663 {
7664 bool writes_global_memory = true;
7665
7666 determine_global_memory_access (stmt, &writes_global_memory,
7667 NULL, NULL);
7668
7669 if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7670 {
7671 *pt = find_what_var_points_to (cfun->decl, vi);
7672 /* Escaped (and thus nonlocal) variables are always
7673 implicitly clobbered by calls. */
7674 /* ??? ESCAPED can be empty even though NONLOCAL
7675 always escapes. */
7676 if (writes_global_memory)
7677 {
7678 pt->nonlocal = 1;
7679 pt->escaped = 1;
7680 }
7681 }
7682 else if (writes_global_memory)
7683 {
7684 /* If there is nothing special about this call then
7685 we have made everything that is used also escape. */
7686 *pt = cfun->gimple_df->escaped;
7687 pt->nonlocal = 1;
7688 }
7689 else
7690 memset (pt, 0, sizeof (struct pt_solution));
7691 }
7692 }
7693 }
7694
7695 timevar_pop (TV_TREE_PTA);
7696 }
7697
7698
7699 /* Delete created points-to sets. */
7700
7701 static void
7702 delete_points_to_sets (void)
7703 {
7704 unsigned int i;
7705
7706 delete shared_bitmap_table;
7707 shared_bitmap_table = NULL;
7708 if (dump_file && (dump_flags & TDF_STATS))
7709 fprintf (dump_file, "Points to sets created:%d\n",
7710 stats.points_to_sets_created);
7711
7712 delete vi_for_tree;
7713 delete call_stmt_vars;
7714 bitmap_obstack_release (&pta_obstack);
7715 constraints.release ();
7716
7717 for (i = 0; i < graph->size; i++)
7718 graph->complex[i].release ();
7719 free (graph->complex);
7720
7721 free (graph->rep);
7722 free (graph->succs);
7723 free (graph->pe);
7724 free (graph->pe_rep);
7725 free (graph->indirect_cycles);
7726 free (graph);
7727
7728 varmap.release ();
7729 variable_info_pool.release ();
7730 constraint_pool.release ();
7731
7732 obstack_free (&fake_var_decl_obstack, NULL);
7733
7734 delete final_solutions;
7735 obstack_free (&final_solutions_obstack, NULL);
7736 }
7737
7738 struct vls_data
7739 {
7740 unsigned short clique;
7741 bool escaped_p;
7742 bitmap rvars;
7743 };
7744
7745 /* Mark "other" loads and stores as belonging to CLIQUE and with
7746 base zero. */
7747
7748 static bool
7749 visit_loadstore (gimple *, tree base, tree ref, void *data)
7750 {
7751 unsigned short clique = ((vls_data *) data)->clique;
7752 bitmap rvars = ((vls_data *) data)->rvars;
7753 bool escaped_p = ((vls_data *) data)->escaped_p;
7754 if (TREE_CODE (base) == MEM_REF
7755 || TREE_CODE (base) == TARGET_MEM_REF)
7756 {
7757 tree ptr = TREE_OPERAND (base, 0);
7758 if (TREE_CODE (ptr) == SSA_NAME)
7759 {
7760 /* For parameters, get at the points-to set for the actual parm
7761 decl. */
7762 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7763 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7764 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7765 ptr = SSA_NAME_VAR (ptr);
7766
7767 /* We need to make sure 'ptr' doesn't include any of
7768 the restrict tags we added bases for in its points-to set. */
7769 varinfo_t vi = lookup_vi_for_tree (ptr);
7770 if (! vi)
7771 return false;
7772
7773 vi = get_varinfo (find (vi->id));
7774 if (bitmap_intersect_p (rvars, vi->solution)
7775 || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
7776 return false;
7777 }
7778
7779 /* Do not overwrite existing cliques (that includes clique, base
7780 pairs we just set). */
7781 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7782 {
7783 MR_DEPENDENCE_CLIQUE (base) = clique;
7784 MR_DEPENDENCE_BASE (base) = 0;
7785 }
7786 }
7787
7788 /* For plain decl accesses see whether they are accesses to globals
7789 and rewrite them to MEM_REFs with { clique, 0 }. */
7790 if (VAR_P (base)
7791 && is_global_var (base)
7792 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7793 ops callback. */
7794 && base != ref)
7795 {
7796 tree *basep = &ref;
7797 while (handled_component_p (*basep))
7798 basep = &TREE_OPERAND (*basep, 0);
7799 gcc_assert (VAR_P (*basep));
7800 tree ptr = build_fold_addr_expr (*basep);
7801 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7802 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7803 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7804 MR_DEPENDENCE_BASE (*basep) = 0;
7805 }
7806
7807 return false;
7808 }
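/* Author-added example of the rewrite above (GLOB is hypothetical):
   a direct global access such as

     glob.f = 1;

   becomes the equivalent of

     MEM[(&glob), 0].f = 1;   // dependence info { CLIQUE, base 0 }

   so it can be disambiguated against restrict-tagged MEM_REFs of the
   same clique.  */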
7809
7810 struct msdi_data {
7811 tree ptr;
7812 unsigned short *clique;
7813 unsigned short *last_ruid;
7814 varinfo_t restrict_var;
7815 };
7816
7817 /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7818 CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7819 Return whether dependence info was assigned to BASE. */
7820
7821 static bool
7822 maybe_set_dependence_info (gimple *, tree base, tree, void *data)
7823 {
7824 tree ptr = ((msdi_data *)data)->ptr;
7825 unsigned short &clique = *((msdi_data *)data)->clique;
7826 unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
7827 varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
7828 if ((TREE_CODE (base) == MEM_REF
7829 || TREE_CODE (base) == TARGET_MEM_REF)
7830 && TREE_OPERAND (base, 0) == ptr)
7831 {
7832 /* Do not overwrite existing cliques. This avoids overwriting dependence
7833 info when a function with restrict parameters is inlined into another
7834 function with restrict parameters. This usually means we
7835 prefer to be precise in innermost loops. */
7836 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7837 {
7838 if (clique == 0)
7839 {
7840 if (cfun->last_clique == 0)
7841 cfun->last_clique = 1;
7842 clique = 1;
7843 }
7844 if (restrict_var->ruid == 0)
7845 restrict_var->ruid = ++last_ruid;
7846 MR_DEPENDENCE_CLIQUE (base) = clique;
7847 MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
7848 return true;
7849 }
7850 }
7851 return false;
7852 }
7853
7854 /* Clear dependence info for the clique DATA. */
7855
7856 static bool
7857 clear_dependence_clique (gimple *, tree base, tree, void *data)
7858 {
7859 unsigned short clique = (uintptr_t)data;
7860 if ((TREE_CODE (base) == MEM_REF
7861 || TREE_CODE (base) == TARGET_MEM_REF)
7862 && MR_DEPENDENCE_CLIQUE (base) == clique)
7863 {
7864 MR_DEPENDENCE_CLIQUE (base) = 0;
7865 MR_DEPENDENCE_BASE (base) = 0;
7866 }
7867
7868 return false;
7869 }
7870
7871 /* Compute the set of independent memory references based on restrict
7872 tags and their conservative propagation to the points-to sets. */
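/* A minimal author-added example of the property computed here:

     void f (int * restrict p, int * restrict q)
     {
       *p = 1;
       *q = 2;
     }

   P and Q point to distinct restrict tags, so their dereferences get
   the same clique but different bases and therefore do not alias,
   while accesses not based on a restrict pointer receive base
   zero.  */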
7873
7874 static void
7875 compute_dependence_clique (void)
7876 {
7877 /* First clear the special "local" clique. */
7878 basic_block bb;
7879 if (cfun->last_clique != 0)
7880 FOR_EACH_BB_FN (bb, cfun)
7881 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7882 !gsi_end_p (gsi); gsi_next (&gsi))
7883 {
7884 gimple *stmt = gsi_stmt (gsi);
7885 walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
7886 clear_dependence_clique,
7887 clear_dependence_clique);
7888 }
7889
7890 unsigned short clique = 0;
7891 unsigned short last_ruid = 0;
7892 bitmap rvars = BITMAP_ALLOC (NULL);
7893 bool escaped_p = false;
7894 for (unsigned i = 0; i < num_ssa_names; ++i)
7895 {
7896 tree ptr = ssa_name (i);
7897 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7898 continue;
7899
7900 /* Avoid all this when ptr is not dereferenced? */
7901 tree p = ptr;
7902 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7903 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7904 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7905 p = SSA_NAME_VAR (ptr);
7906 varinfo_t vi = lookup_vi_for_tree (p);
7907 if (!vi)
7908 continue;
7909 vi = get_varinfo (find (vi->id));
7910 bitmap_iterator bi;
7911 unsigned j;
7912 varinfo_t restrict_var = NULL;
7913 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7914 {
7915 varinfo_t oi = get_varinfo (j);
7916 if (oi->head != j)
7917 oi = get_varinfo (oi->head);
7918 if (oi->is_restrict_var)
7919 {
7920 if (restrict_var
7921 && restrict_var != oi)
7922 {
7923 if (dump_file && (dump_flags & TDF_DETAILS))
7924 {
7925 fprintf (dump_file, "found restrict pointed-to "
7926 "for ");
7927 print_generic_expr (dump_file, ptr);
7928 fprintf (dump_file, " but not exclusively\n");
7929 }
7930 restrict_var = NULL;
7931 break;
7932 }
7933 restrict_var = oi;
7934 }
7935 /* NULL is the only other valid points-to entry. */
7936 else if (oi->id != nothing_id)
7937 {
7938 restrict_var = NULL;
7939 break;
7940 }
7941 }
7942 /* Ok, found that ptr must(!) point to a single(!) restrict
7943 variable. */
7944 /* ??? PTA isn't really a proper propagation engine to compute
7945 this property.
7946 ??? We could handle merging of two restricts by unifying them. */
7947 if (restrict_var)
7948 {
7949 /* Now look at possible dereferences of ptr. */
7950 imm_use_iterator ui;
7951 gimple *use_stmt;
7952 bool used = false;
7953 msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
7954 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7955 used |= walk_stmt_load_store_ops (use_stmt, &data,
7956 maybe_set_dependence_info,
7957 maybe_set_dependence_info);
7958 if (used)
7959 {
7960 /* Add all subvars to the restrict pointed-to set. */
7961 for (unsigned sv = restrict_var->head; sv != 0;
7962 sv = get_varinfo (sv)->next)
7963 bitmap_set_bit (rvars, sv);
7964 varinfo_t escaped = get_varinfo (find (escaped_id));
7965 if (bitmap_bit_p (escaped->solution, restrict_var->id))
7966 escaped_p = true;
7967 }
7968 }
7969 }
7970
7971 if (clique != 0)
7972 {
7973 /* Assign the BASE id zero to all accesses not based on a restrict
7974 pointer. That way they get disambiguated against restrict
7975 accesses but not against each other. */
7976 /* ??? For restricts derived from globals (thus not incoming
7977 parameters) we can't restrict scoping properly thus the following
7978 is too aggressive there. For now we have excluded those globals from
7979 getting into the MR_DEPENDENCE machinery. */
7980 vls_data data = { clique, escaped_p, rvars };
7981 basic_block bb;
7982 FOR_EACH_BB_FN (bb, cfun)
7983 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7984 !gsi_end_p (gsi); gsi_next (&gsi))
7985 {
7986 gimple *stmt = gsi_stmt (gsi);
7987 walk_stmt_load_store_ops (stmt, &data,
7988 visit_loadstore, visit_loadstore);
7989 }
7990 }
7991
7992 BITMAP_FREE (rvars);
7993 }
7994
7995 /* Compute points-to information for every SSA_NAME pointer in the
7996 current function and compute the transitive closure of escaped
7997 variables to re-initialize the call-clobber states of local variables. */
7998
7999 unsigned int
8000 compute_may_aliases (void)
8001 {
8002 if (cfun->gimple_df->ipa_pta)
8003 {
8004 if (dump_file)
8005 {
8006 fprintf (dump_file, "\nNot re-computing points-to information "
8007 "because IPA points-to information is available.\n\n");
8008
8009 /* But still dump what information we have. */
8010 dump_alias_info (dump_file);
8011 }
8012
8013 return 0;
8014 }
8015
8016 /* For each pointer P_i, determine the sets of variables that P_i may
8017 point-to. Compute the reachability set of escaped and call-used
8018 variables. */
8019 compute_points_to_sets ();
8020
8021 /* Debugging dumps. */
8022 if (dump_file)
8023 dump_alias_info (dump_file);
8024
8025 /* Compute restrict-based memory disambiguations. */
8026 compute_dependence_clique ();
8027
8028 /* Deallocate memory used by aliasing data structures and the internal
8029 points-to solution. */
8030 delete_points_to_sets ();
8031
8032 gcc_assert (!need_ssa_update_p (cfun));
8033
8034 return 0;
8035 }
8036
8037 /* A dummy pass to cause points-to information to be computed via
8038 TODO_rebuild_alias. */
8039
8040 namespace {
8041
8042 const pass_data pass_data_build_alias =
8043 {
8044 GIMPLE_PASS, /* type */
8045 "alias", /* name */
8046 OPTGROUP_NONE, /* optinfo_flags */
8047 TV_NONE, /* tv_id */
8048 ( PROP_cfg | PROP_ssa ), /* properties_required */
8049 0, /* properties_provided */
8050 0, /* properties_destroyed */
8051 0, /* todo_flags_start */
8052 TODO_rebuild_alias, /* todo_flags_finish */
8053 };
8054
8055 class pass_build_alias : public gimple_opt_pass
8056 {
8057 public:
8058 pass_build_alias (gcc::context *ctxt)
8059 : gimple_opt_pass (pass_data_build_alias, ctxt)
8060 {}
8061
8062 /* opt_pass methods: */
8063 virtual bool gate (function *) { return flag_tree_pta; }
8064
8065 }; // class pass_build_alias
8066
8067 } // anon namespace
8068
8069 gimple_opt_pass *
8070 make_pass_build_alias (gcc::context *ctxt)
8071 {
8072 return new pass_build_alias (ctxt);
8073 }
8074
8075 /* A dummy pass to cause points-to information to be computed via
8076 TODO_rebuild_alias. */
8077
8078 namespace {
8079
8080 const pass_data pass_data_build_ealias =
8081 {
8082 GIMPLE_PASS, /* type */
8083 "ealias", /* name */
8084 OPTGROUP_NONE, /* optinfo_flags */
8085 TV_NONE, /* tv_id */
8086 ( PROP_cfg | PROP_ssa ), /* properties_required */
8087 0, /* properties_provided */
8088 0, /* properties_destroyed */
8089 0, /* todo_flags_start */
8090 TODO_rebuild_alias, /* todo_flags_finish */
8091 };
8092
8093 class pass_build_ealias : public gimple_opt_pass
8094 {
8095 public:
8096 pass_build_ealias (gcc::context *ctxt)
8097 : gimple_opt_pass (pass_data_build_ealias, ctxt)
8098 {}
8099
8100 /* opt_pass methods: */
8101 virtual bool gate (function *) { return flag_tree_pta; }
8102
8103 }; // class pass_build_ealias
8104
8105 } // anon namespace
8106
8107 gimple_opt_pass *
8108 make_pass_build_ealias (gcc::context *ctxt)
8109 {
8110 return new pass_build_ealias (ctxt);
8111 }
8112
8113
8114 /* IPA PTA solutions for ESCAPED. */
8115 struct pt_solution ipa_escaped_pt
8116 = { true, false, false, false, false,
8117 false, false, false, false, false, NULL };
8118
8119 /* Associate node with varinfo DATA. Worker for
8120 cgraph_for_symbol_thunks_and_aliases. */
8121 static bool
8122 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
8123 {
8124 if ((node->alias
8125 || (node->thunk
8126 && ! node->inlined_to))
8127 && node->analyzed
8128 && !node->ifunc_resolver)
8129 insert_vi_for_tree (node->decl, (varinfo_t)data);
8130 return false;
8131 }
8132
8133 /* Dump varinfo VI to FILE. */
8134
8135 static void
8136 dump_varinfo (FILE *file, varinfo_t vi)
8137 {
8138 if (vi == NULL)
8139 return;
8140
8141 fprintf (file, "%u: %s\n", vi->id, vi->name);
8142
8143 const char *sep = " ";
8144 if (vi->is_artificial_var)
8145 fprintf (file, "%sartificial", sep);
8146 if (vi->is_special_var)
8147 fprintf (file, "%sspecial", sep);
8148 if (vi->is_unknown_size_var)
8149 fprintf (file, "%sunknown-size", sep);
8150 if (vi->is_full_var)
8151 fprintf (file, "%sfull", sep);
8152 if (vi->is_heap_var)
8153 fprintf (file, "%sheap", sep);
8154 if (vi->may_have_pointers)
8155 fprintf (file, "%smay-have-pointers", sep);
8156 if (vi->only_restrict_pointers)
8157 fprintf (file, "%sonly-restrict-pointers", sep);
8158 if (vi->is_restrict_var)
8159 fprintf (file, "%sis-restrict-var", sep);
8160 if (vi->is_global_var)
8161 fprintf (file, "%sglobal", sep);
8162 if (vi->is_ipa_escape_point)
8163 fprintf (file, "%sipa-escape-point", sep);
8164 if (vi->is_fn_info)
8165 fprintf (file, "%sfn-info", sep);
8166 if (vi->ruid)
8167 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
8168 if (vi->next)
8169 fprintf (file, "%snext:%u", sep, vi->next);
8170 if (vi->head != vi->id)
8171 fprintf (file, "%shead:%u", sep, vi->head);
8172 if (vi->offset)
8173 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
8174 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
8175 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
8176 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
8177 && vi->fullsize != vi->size)
8178 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
8179 vi->fullsize);
8180 fprintf (file, "\n");
8181
8182 if (vi->solution && !bitmap_empty_p (vi->solution))
8183 {
8184 bitmap_iterator bi;
8185 unsigned i;
8186 fprintf (file, " solution: {");
8187 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
8188 fprintf (file, " %u", i);
8189 fprintf (file, " }\n");
8190 }
8191
8192 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
8193 && !bitmap_equal_p (vi->solution, vi->oldsolution))
8194 {
8195 bitmap_iterator bi;
8196 unsigned i;
8197 fprintf (file, " oldsolution: {");
8198 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
8199 fprintf (file, " %u", i);
8200 fprintf (file, " }\n");
8201 }
8202 }
8203
8204 /* Dump varinfo VI to stderr. */
8205
8206 DEBUG_FUNCTION void
8207 debug_varinfo (varinfo_t vi)
8208 {
8209 dump_varinfo (stderr, vi);
8210 }
8211
8212 /* Dump varmap to FILE. */
8213
8214 static void
8215 dump_varmap (FILE *file)
8216 {
8217 if (varmap.length () == 0)
8218 return;
8219
8220 fprintf (file, "variables:\n");
8221
8222 for (unsigned int i = 0; i < varmap.length (); ++i)
8223 {
8224 varinfo_t vi = get_varinfo (i);
8225 dump_varinfo (file, vi);
8226 }
8227
8228 fprintf (file, "\n");
8229 }
8230
8231 /* Dump varmap to stderr. */
8232
8233 DEBUG_FUNCTION void
8234 debug_varmap (void)
8235 {
8236 dump_varmap (stderr);
8237 }
8238
8239 /* Compute whether NODE is referred to non-locally. Worker for
8240 cgraph_for_symbol_thunks_and_aliases. */
8241 static bool
8242 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
8243 {
8244 bool *nonlocal_p = (bool *)data;
8245 *nonlocal_p |= (node->used_from_other_partition
8246 || DECL_EXTERNAL (node->decl)
8247 || TREE_PUBLIC (node->decl)
8248 || node->force_output
8249 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
8250 return false;
8251 }
8252
8253 /* Same for varpool nodes. */
8254 static bool
8255 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8256 {
8257 bool *nonlocal_p = (bool *)data;
8258 *nonlocal_p |= (node->used_from_other_partition
8259 || DECL_EXTERNAL (node->decl)
8260 || TREE_PUBLIC (node->decl)
8261 || node->force_output);
8262 return false;
8263 }
8264
8265 /* Execute the driver for IPA PTA. */
8266 static unsigned int
8267 ipa_pta_execute (void)
8268 {
8269 struct cgraph_node *node;
8270 varpool_node *var;
8271 unsigned int from = 0;
8272
8273 in_ipa_mode = 1;
8274
8275 init_alias_vars ();
8276
8277 if (dump_file && (dump_flags & TDF_DETAILS))
8278 {
8279 symtab->dump (dump_file);
8280 fprintf (dump_file, "\n");
8281 }
8282
8283 if (dump_file)
8284 {
8285 fprintf (dump_file, "Generating generic constraints\n\n");
8286 dump_constraints (dump_file, from);
8287 fprintf (dump_file, "\n");
8288 from = constraints.length ();
8289 }
8290
8291 /* Build the constraints. */
8292 FOR_EACH_DEFINED_FUNCTION (node)
8293 {
8294 varinfo_t vi;
8295 /* Nodes without a body in this partition are not interesting.
8296 Especially do not visit clones at this point for now - we
8297 get duplicate decls there for inline clones at least. */
8298 if (!node->has_gimple_body_p ()
8299 || node->in_other_partition
8300 || node->inlined_to)
8301 continue;
8302 node->get_body ();
8303
8304 gcc_assert (!node->clone_of);
8305
8306 /* For externally visible or attribute used annotated functions use
8307 local constraints for their arguments.
8308 For local functions we see all callers and thus do not need initial
8309 constraints for parameters. */
8310 bool nonlocal_p = (node->used_from_other_partition
8311 || DECL_EXTERNAL (node->decl)
8312 || TREE_PUBLIC (node->decl)
8313 || node->force_output
8314 || lookup_attribute ("noipa",
8315 DECL_ATTRIBUTES (node->decl)));
8316 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8317 &nonlocal_p, true);
8318
8319 vi = create_function_info_for (node->decl,
8320 alias_get_name (node->decl), false,
8321 nonlocal_p);
8322 if (dump_file
8323 && from != constraints.length ())
8324 {
8325 fprintf (dump_file,
8326 "Generating initial constraints for %s",
8327 node->dump_name ());
8328 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8329 fprintf (dump_file, " (%s)",
8330 IDENTIFIER_POINTER
8331 (DECL_ASSEMBLER_NAME (node->decl)));
8332 fprintf (dump_file, "\n\n");
8333 dump_constraints (dump_file, from);
8334 fprintf (dump_file, "\n");
8335
8336 from = constraints.length ();
8337 }
8338
8339 node->call_for_symbol_thunks_and_aliases
8340 (associate_varinfo_to_alias, vi, true);
8341 }
8342
8343 /* Create constraints for global variables and their initializers. */
8344 FOR_EACH_VARIABLE (var)
8345 {
8346 if (var->alias && var->analyzed)
8347 continue;
8348
8349 varinfo_t vi = get_vi_for_tree (var->decl);
8350
8351 /* For the purpose of IPA PTA unit-local globals are not
8352 escape points. */
8353 bool nonlocal_p = (DECL_EXTERNAL (var->decl)
8354 || TREE_PUBLIC (var->decl)
8355 || var->used_from_other_partition
8356 || var->force_output);
8357 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8358 &nonlocal_p, true);
8359 if (nonlocal_p)
8360 vi->is_ipa_escape_point = true;
8361 }
8362
8363 if (dump_file
8364 && from != constraints.length ())
8365 {
8366 fprintf (dump_file,
8367 "Generating constraints for global initializers\n\n");
8368 dump_constraints (dump_file, from);
8369 fprintf (dump_file, "\n");
8370 from = constraints.length ();
8371 }
8372
8373 FOR_EACH_DEFINED_FUNCTION (node)
8374 {
8375 struct function *func;
8376 basic_block bb;
8377
8378 /* Nodes without a body in this partition are not interesting. */
8379 if (!node->has_gimple_body_p ()
8380 || node->in_other_partition
8381 || node->clone_of)
8382 continue;
8383
8384 if (dump_file)
8385 {
8386 fprintf (dump_file,
8387 "Generating constraints for %s", node->dump_name ());
8388 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8389 fprintf (dump_file, " (%s)",
8390 IDENTIFIER_POINTER
8391 (DECL_ASSEMBLER_NAME (node->decl)));
8392 fprintf (dump_file, "\n");
8393 }
8394
8395 func = DECL_STRUCT_FUNCTION (node->decl);
8396 gcc_assert (cfun == NULL);
8397
8398 /* Build constraints for the function body. */
8399 FOR_EACH_BB_FN (bb, func)
8400 {
8401 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8402 gsi_next (&gsi))
8403 {
8404 gphi *phi = gsi.phi ();
8405
8406 if (! virtual_operand_p (gimple_phi_result (phi)))
8407 find_func_aliases (func, phi);
8408 }
8409
8410 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8411 gsi_next (&gsi))
8412 {
8413 gimple *stmt = gsi_stmt (gsi);
8414
8415 find_func_aliases (func, stmt);
8416 find_func_clobbers (func, stmt);
8417 }
8418 }
8419
8420 if (dump_file)
8421 {
8422 fprintf (dump_file, "\n");
8423 dump_constraints (dump_file, from);
8424 fprintf (dump_file, "\n");
8425 from = constraints.length ();
8426 }
8427 }
8428
8429 /* From the constraints compute the points-to sets. */
8430 solve_constraints ();
8431
8432 if (dump_file)
8433 dump_sa_points_to_info (dump_file);
8434
8435 /* Now post-process solutions to handle locals from different
8436 runtime instantiations coming in through recursive invocations. */
8437 unsigned shadow_var_cnt = 0;
8438 for (unsigned i = 1; i < varmap.length (); ++i)
8439 {
8440 varinfo_t fi = get_varinfo (i);
8441 if (fi->is_fn_info
8442 && fi->decl)
8443 /* Automatic variables pointed to by their containing function's
8444 parameters need this treatment. */
8445 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8446 ai; ai = vi_next (ai))
8447 {
8448 varinfo_t vi = get_varinfo (find (ai->id));
8449 bitmap_iterator bi;
8450 unsigned j;
8451 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8452 {
8453 varinfo_t pt = get_varinfo (j);
8454 if (pt->shadow_var_uid == 0
8455 && pt->decl
8456 && auto_var_in_fn_p (pt->decl, fi->decl))
8457 {
8458 pt->shadow_var_uid = allocate_decl_uid ();
8459 shadow_var_cnt++;
8460 }
8461 }
8462 }
8463 /* As well as global variables which are another way of passing
8464 arguments to recursive invocations. */
8465 else if (fi->is_global_var)
8466 {
8467 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8468 {
8469 varinfo_t vi = get_varinfo (find (ai->id));
8470 bitmap_iterator bi;
8471 unsigned j;
8472 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8473 {
8474 varinfo_t pt = get_varinfo (j);
8475 if (pt->shadow_var_uid == 0
8476 && pt->decl
8477 && auto_var_p (pt->decl))
8478 {
8479 pt->shadow_var_uid = allocate_decl_uid ();
8480 shadow_var_cnt++;
8481 }
8482 }
8483 }
8484 }
8485 }
8486 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8487 fprintf (dump_file, "Allocated %u shadow variables for locals "
8488 "maybe leaking into recursive invocations of their containing "
8489 "functions\n", shadow_var_cnt);
8490
8491 /* Compute the global points-to sets for ESCAPED.
8492 ??? Note that the computed escape set is not correct
8493 for the whole unit as we fail to consider graph edges to
8494 externally visible functions. */
8495 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8496
8497 /* Make sure the ESCAPED solution (which is used as placeholder in
8498 other solutions) does not reference itself. This simplifies
8499 points-to solution queries. */
8500 ipa_escaped_pt.ipa_escaped = 0;
8501
8502 /* Assign the points-to sets to the SSA names in the unit. */
8503 FOR_EACH_DEFINED_FUNCTION (node)
8504 {
8505 tree ptr;
8506 struct function *fn;
8507 unsigned i;
8508 basic_block bb;
8509
8510 /* Nodes without a body in this partition are not interesting. */
8511 if (!node->has_gimple_body_p ()
8512 || node->in_other_partition
8513 || node->clone_of)
8514 continue;
8515
8516 fn = DECL_STRUCT_FUNCTION (node->decl);
8517
8518 /* Compute the points-to sets for pointer SSA_NAMEs. */
8519 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8520 {
8521 if (ptr
8522 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8523 find_what_p_points_to (node->decl, ptr);
8524 }
8525
8526 /* Compute the call-use and call-clobber sets for indirect calls
8527 and calls to external functions. */
8528 FOR_EACH_BB_FN (bb, fn)
8529 {
8530 gimple_stmt_iterator gsi;
8531
8532 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8533 {
8534 gcall *stmt;
8535 struct pt_solution *pt;
8536 varinfo_t vi, fi;
8537 tree decl;
8538
8539 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8540 if (!stmt)
8541 continue;
8542
8543 /* Handle direct calls to functions with body. */
8544 decl = gimple_call_fndecl (stmt);
8545
8546 {
8547 tree called_decl = NULL_TREE;
8548 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8549 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8550 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8551 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8552
8553 if (called_decl != NULL_TREE
8554 && !fndecl_maybe_in_other_partition (called_decl))
8555 decl = called_decl;
8556 }
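/* Author-added note: for, e.g., GOMP_parallel (foo._omp_fn.0, ...)
   the outlined function passed as argument is the function actually
   run, so it is treated like a direct callee unless it may reside
   in another partition.  */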
8557
8558 if (decl
8559 && (fi = lookup_vi_for_tree (decl))
8560 && fi->is_fn_info)
8561 {
8562 *gimple_call_clobber_set (stmt)
8563 = find_what_var_points_to
8564 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8565 *gimple_call_use_set (stmt)
8566 = find_what_var_points_to
8567 (node->decl, first_vi_for_offset (fi, fi_uses));
8568 }
8569 /* Handle direct calls to external functions. */
8570 else if (decl && (!fi || fi->decl))
8571 {
8572 pt = gimple_call_use_set (stmt);
8573 if (gimple_call_flags (stmt) & ECF_CONST)
8574 memset (pt, 0, sizeof (struct pt_solution));
8575 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8576 {
8577 *pt = find_what_var_points_to (node->decl, vi);
8578 /* Escaped (and thus nonlocal) variables are always
8579 implicitly used by calls. */
8580 /* ??? ESCAPED can be empty even though NONLOCAL
8581 always escapes. */
8582 pt->nonlocal = 1;
8583 pt->ipa_escaped = 1;
8584 }
8585 else
8586 {
8587 /* If there is nothing special about this call then
8588 we have made everything that is used also escape. */
8589 *pt = ipa_escaped_pt;
8590 pt->nonlocal = 1;
8591 }
8592
8593 pt = gimple_call_clobber_set (stmt);
8594 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8595 memset (pt, 0, sizeof (struct pt_solution));
8596 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8597 {
8598 *pt = find_what_var_points_to (node->decl, vi);
8599 /* Escaped (and thus nonlocal) variables are always
8600 implicitly clobbered by calls. */
8601 /* ??? ESCAPED can be empty even though NONLOCAL
8602 always escapes. */
8603 pt->nonlocal = 1;
8604 pt->ipa_escaped = 1;
8605 }
8606 else
8607 {
8608 /* If there is nothing special about this call then
8609 we have made everything that is used also escape. */
8610 *pt = ipa_escaped_pt;
8611 pt->nonlocal = 1;
8612 }
8613 }
8614 /* Handle indirect calls. */
8615 else if ((fi = get_fi_for_callee (stmt)))
8616 {
8617 /* We need to accumulate all clobbers/uses of all possible
8618 callees. */
8619 fi = get_varinfo (find (fi->id));
8620 /* If we cannot constrain the set of functions we may end up
8621 calling, we end up using/clobbering everything. */
8622 if (bitmap_bit_p (fi->solution, anything_id)
8623 || bitmap_bit_p (fi->solution, nonlocal_id)
8624 || bitmap_bit_p (fi->solution, escaped_id))
8625 {
8626 pt_solution_reset (gimple_call_clobber_set (stmt));
8627 pt_solution_reset (gimple_call_use_set (stmt));
8628 }
8629 else
8630 {
8631 bitmap_iterator bi;
8632 unsigned i;
8633 struct pt_solution *uses, *clobbers;
8634
8635 uses = gimple_call_use_set (stmt);
8636 clobbers = gimple_call_clobber_set (stmt);
8637 memset (uses, 0, sizeof (struct pt_solution));
8638 memset (clobbers, 0, sizeof (struct pt_solution));
8639 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8640 {
8641 struct pt_solution sol;
8642
8643 vi = get_varinfo (i);
8644 if (!vi->is_fn_info)
8645 {
8646 /* ??? We could be more precise here? */
8647 uses->nonlocal = 1;
8648 uses->ipa_escaped = 1;
8649 clobbers->nonlocal = 1;
8650 clobbers->ipa_escaped = 1;
8651 continue;
8652 }
8653
8654 if (!uses->anything)
8655 {
8656 sol = find_what_var_points_to
8657 (node->decl,
8658 first_vi_for_offset (vi, fi_uses));
8659 pt_solution_ior_into (uses, &sol);
8660 }
8661 if (!clobbers->anything)
8662 {
8663 sol = find_what_var_points_to
8664 (node->decl,
8665 first_vi_for_offset (vi, fi_clobbers));
8666 pt_solution_ior_into (clobbers, &sol);
8667 }
8668 }
8669 }
8670 }
8671 else
8672 gcc_unreachable ();
8673 }
8674 }
8675
8676 fn->gimple_df->ipa_pta = true;
8677
8678 /* We have to re-set the final-solution cache after each function
8679 because what is a "global" is dependent on function context. */
8680 final_solutions->empty ();
8681 obstack_free (&final_solutions_obstack, NULL);
8682 gcc_obstack_init (&final_solutions_obstack);
8683 }
8684
8685 delete_points_to_sets ();
8686
8687 in_ipa_mode = 0;
8688
8689 return 0;
8690 }
8691
8692 namespace {
8693
8694 const pass_data pass_data_ipa_pta =
8695 {
8696 SIMPLE_IPA_PASS, /* type */
8697 "pta", /* name */
8698 OPTGROUP_NONE, /* optinfo_flags */
8699 TV_IPA_PTA, /* tv_id */
8700 0, /* properties_required */
8701 0, /* properties_provided */
8702 0, /* properties_destroyed */
8703 0, /* todo_flags_start */
8704 0, /* todo_flags_finish */
8705 };
8706
8707 class pass_ipa_pta : public simple_ipa_opt_pass
8708 {
8709 public:
8710 pass_ipa_pta (gcc::context *ctxt)
8711 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8712 {}
8713
8714 /* opt_pass methods: */
8715 virtual bool gate (function *)
8716 {
8717 return (optimize
8718 && flag_ipa_pta
8719 /* Don't bother doing anything if the program has errors. */
8720 && !seen_error ());
8721 }
8722
8723 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8724
8725 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8726
8727 }; // class pass_ipa_pta
8728
8729 } // anon namespace
8730
8731 simple_ipa_opt_pass *
8732 make_pass_ipa_pta (gcc::context *ctxt)
8733 {
8734 return new pass_ipa_pta (ctxt);
8735 }
8736