xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa-structalias.c (revision d909946ca08dceb44d7d0f22ec9488679695d976)
1 /* Tree based points-to analysis
2    Copyright (C) 2005-2013 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dberlin@dberlin.org>
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify
8    under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3 of the License, or
10    (at your option) any later version.
11 
12    GCC is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "basic-block.h"
30 #include "tree.h"
31 #include "tree-flow.h"
32 #include "tree-inline.h"
33 #include "diagnostic-core.h"
34 #include "gimple.h"
35 #include "hashtab.h"
36 #include "function.h"
37 #include "cgraph.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "splay-tree.h"
41 #include "params.h"
42 #include "cgraph.h"
43 #include "alias.h"
44 #include "pointer-set.h"
45 
46 /* The idea behind this analyzer is to generate set constraints from the
47    program, then solve the resulting constraints in order to generate the
48    points-to sets.
49 
50    Set constraints are a way of modeling program analysis problems that
51    involve sets.  They consist of an inclusion constraint language,
52    describing the variables (each variable is a set) and operations that
53    are involved on the variables, and a set of rules that derive facts
54    from these operations.  To solve a system of set constraints, you derive
55    all possible facts under the rules, which gives you the correct sets
56    as a consequence.
57 
58    See  "Efficient Field-sensitive pointer analysis for C" by "David
59    J. Pearce and Paul H. J. Kelly and Chris Hankin, at
60    http://citeseer.ist.psu.edu/pearce04efficient.html
61 
62    Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
64    http://citeseer.ist.psu.edu/heintze01ultrafast.html
65 
66    There are three types of real constraint expressions, DEREF,
67    ADDRESSOF, and SCALAR.  Each constraint expression consists
68    of a constraint type, a variable, and an offset.
69 
70    SCALAR is a constraint expression type used to represent x, whether
71    it appears on the LHS or the RHS of a statement.
72    DEREF is a constraint expression type used to represent *x, whether
73    it appears on the LHS or the RHS of a statement.
74    ADDRESSOF is a constraint expression used to represent &x, whether
75    it appears on the LHS or the RHS of a statement.
76 
77    Each pointer variable in the program is assigned an integer id, and
78    each field of a structure variable is assigned an integer id as well.
79 
80    Structure variables are linked to their list of fields through a "next
81    field" in each variable that points to the next field in offset
82    order.
83    Each variable for a structure field has
84 
85    1. "size", that tells the size in bits of that field.
86    2. "fullsize, that tells the size in bits of the entire structure.
87    3. "offset", that tells the offset in bits from the beginning of the
88    structure to this field.
89 
90    Thus,
91    struct f
92    {
93      int a;
94      int b;
95    } foo;
96    int *bar;
97 
98    looks like
99 
100    foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101    foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102    bar -> id 3, size 32, offset 0, fullsize 32, next NULL
103 
104 
105   In order to solve the system of set constraints, the following is
106   done:
107 
108   1. Each constraint variable x has a solution set associated with it,
109   Sol(x).
110 
111   2. Constraints are separated into direct, copy, and complex.
112   Direct constraints are ADDRESSOF constraints that require no extra
113   processing, such as P = &Q
114   Copy constraints are those of the form P = Q.
115   Complex constraints are all the constraints involving dereferences
116   and offsets (including offsetted copies).
117 
118   3. All direct constraints of the form P = &Q are processed, such
119   that Q is added to Sol(P)
120 
121   4. All complex constraints for a given constraint variable are stored in a
122   linked list attached to that variable's node.
123 
124   5. A directed graph is built out of the copy constraints. Each
125   constraint variable is a node in the graph, and an edge from
126   Q to P is added for each copy constraint of the form P = Q
127 
128   6. The graph is then walked, and solution sets are
129   propagated along the copy edges, such that an edge from Q to P
130   causes Sol(P) <- Sol(P) union Sol(Q).
131 
132   7.  As we visit each node, all complex constraints associated with
133   that node are processed by adding appropriate copy edges to the graph, or the
134   appropriate variables to the solution set.
135 
136   8. The process of walking the graph is iterated until no solution
137   sets change.
138 
139   Prior to walking the graph in steps 6 and 7, We perform static
140   cycle elimination on the constraint graph, as well
141   as off-line variable substitution.
142 
143   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144   on and turned into anything), but isn't.  You can just see what offset
145   inside the pointed-to struct it's going to access.
146 
147   TODO: Constant bounded arrays can be handled as if they were structs of the
148   same number of elements.
149 
150   TODO: Modeling heap and incoming pointers becomes much better if we
151   add fields to them as we discover them, which we could do.
152 
153   TODO: We could handle unions, but to be honest, it's probably not
154   worth the pain or slowdown.  */
155 
156 /* IPA-PTA optimizations possible.
157 
158    When the indirect function called is ANYTHING we can add disambiguation
159    based on the function signatures (or simply the parameter count which
160    is the varinfo size).  We also do not need to consider functions that
161    do not have their address taken.
162 
163    The is_global_var bit which marks escape points is overly conservative
164    in IPA mode.  Split it to is_escape_point and is_global_var - only
165    externally visible globals are escape points in IPA mode.  This is
166    also needed to fix the pt_solution_includes_global predicate
167    (and thus ptr_deref_may_alias_global_p).
168 
169    The way we introduce DECL_PT_UID to avoid fixing up all points-to
170    sets in the translation unit when we copy a DECL during inlining
171    pessimizes precision.  The advantage is that the DECL_PT_UID keeps
172    compile-time and memory usage overhead low - the points-to sets
173    do not grow or get unshared as they would during a fixup phase.
174    An alternative solution is to delay IPA PTA until after all
175    inlining transformations have been applied.
176 
177    The way we propagate clobber/use information isn't optimized.
178    It should use a new complex constraint that properly filters
179    out local variables of the callee (though that would make
180    the sets invalid after inlining).  OTOH we might as well
181    admit defeat to WHOPR and simply do all the clobber/use analysis
182    and propagation after PTA finished but before we threw away
183    points-to information for memory variables.  WHOPR and PTA
184    do not play along well anyway - the whole constraint solving
185    would need to be done in WPA phase and it will be very interesting
186    to apply the results to local SSA names during LTRANS phase.
187 
188    We probably should compute a per-function unit-ESCAPE solution
189    propagating it simply like the clobber / uses solutions.  The
   solution can go alongside the non-IPA escaped solution and be
191    used to query which vars escape the unit through a function.
192 
193    We never put function decls in points-to sets so we do not
194    keep the set of called functions for indirect calls.
195 
196    And probably more.  */
197 
/* True if we track subfields of structure variables (field-sensitive
   analysis); when false the analysis is field-insensitive.  */
static bool use_field_sensitive = true;

/* Non-zero while the interprocedural (IPA) variant of the analysis
   is running.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps.  */
static bitmap_obstack iteration_obstack;

/* Forward declarations for routines defined later in this file.  */
static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Like EXECUTE_IF_SET_IN_BITMAP, but a no-op when bitmap A is NULL.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
224 
/* Counters collected while building and solving the constraint
   system.  TOTAL_VARS is incremented by new_var_info; the remaining
   fields are updated by the solver machinery later in the file.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
236 
/* Per-constraint-variable record.  One exists for every constraint
   variable, including each field of a decomposed structure (see the
   overview comment at the top of the file).  */
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* A link to the variable for the next field in this structure.
     Fields are chained in offset order.  */
  struct variable_info *next;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  NULL until first needed
     (new_var_info leaves it unallocated).  */
  bitmap oldsolution;
};

/* Shorthand pointer type used throughout this file.  */
typedef struct variable_info *varinfo_t;
296 
/* Forward declarations for field-lookup helpers defined later.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

/* Map varinfo to final pt_solution.  */
static pointer_map_t *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
313 
314 /* Return the varmap element N */
315 
316 static inline varinfo_t
317 get_varinfo (unsigned int n)
318 {
319   return varmap[n];
320 }
321 
/* Static IDs for the special artificial variables, which occupy the
   first slots of varmap (NOTHING, ANYTHING, READONLY, ESCAPED,
   NONLOCAL, STOREDANYTHING and INTEGER).  */
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
       escaped_id = 3, nonlocal_id = 4,
       storedanything_id = 5, integer_id = 6 };
326 
327 /* Return a new variable info structure consisting for a variable
328    named NAME, and using constraint graph node NODE.  Append it
329    to the vector of variable info structures.  */
330 
331 static varinfo_t
332 new_var_info (tree t, const char *name)
333 {
334   unsigned index = varmap.length ();
335   varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
336 
337   ret->id = index;
338   ret->name = name;
339   ret->decl = t;
340   /* Vars without decl are artificial and do not have sub-variables.  */
341   ret->is_artificial_var = (t == NULL_TREE);
342   ret->is_special_var = false;
343   ret->is_unknown_size_var = false;
344   ret->is_full_var = (t == NULL_TREE);
345   ret->is_heap_var = false;
346   ret->may_have_pointers = true;
347   ret->only_restrict_pointers = false;
348   ret->is_global_var = (t == NULL_TREE);
349   ret->is_fn_info = false;
350   if (t && DECL_P (t))
351     ret->is_global_var = (is_global_var (t)
352 			  /* We have to treat even local register variables
353 			     as escape points.  */
354 			  || (TREE_CODE (t) == VAR_DECL
355 			      && DECL_HARD_REGISTER (t)));
356   ret->solution = BITMAP_ALLOC (&pta_obstack);
357   ret->oldsolution = NULL;
358   ret->next = NULL;
359 
360   stats.total_vars++;
361 
362   varmap.safe_push (ret);
363 
364   return ret;
365 }
366 
367 
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  The use variable's NEXT field
   links to the clobber variable (see get_call_vi).  */
struct pointer_map_t *call_stmt_vars;
371 
372 /* Lookup or create the variable for the call statement CALL.  */
373 
374 static varinfo_t
375 get_call_vi (gimple call)
376 {
377   void **slot_p;
378   varinfo_t vi, vi2;
379 
380   slot_p = pointer_map_insert (call_stmt_vars, call);
381   if (*slot_p)
382     return (varinfo_t) *slot_p;
383 
384   vi = new_var_info (NULL_TREE, "CALLUSED");
385   vi->offset = 0;
386   vi->size = 1;
387   vi->fullsize = 2;
388   vi->is_full_var = true;
389 
390   vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
391   vi2->offset = 1;
392   vi2->size = 1;
393   vi2->fullsize = 2;
394   vi2->is_full_var = true;
395 
396   *slot_p = (void *) vi;
397   return vi;
398 }
399 
400 /* Lookup the variable for the call statement CALL representing
401    the uses.  Returns NULL if there is nothing special about this call.  */
402 
403 static varinfo_t
404 lookup_call_use_vi (gimple call)
405 {
406   void **slot_p;
407 
408   slot_p = pointer_map_contains (call_stmt_vars, call);
409   if (slot_p)
410     return (varinfo_t) *slot_p;
411 
412   return NULL;
413 }
414 
415 /* Lookup the variable for the call statement CALL representing
416    the clobbers.  Returns NULL if there is nothing special about this call.  */
417 
418 static varinfo_t
419 lookup_call_clobber_vi (gimple call)
420 {
421   varinfo_t uses = lookup_call_use_vi (call);
422   if (!uses)
423     return NULL;
424 
425   return uses->next;
426 }
427 
428 /* Lookup or create the variable for the call statement CALL representing
429    the uses.  */
430 
431 static varinfo_t
432 get_call_use_vi (gimple call)
433 {
434   return get_call_vi (call);
435 }
436 
437 /* Lookup or create the variable for the call statement CALL representing
438    the clobbers.  */
439 
440 static varinfo_t ATTRIBUTE_UNUSED
441 get_call_clobber_vi (gimple call)
442 {
443   return get_call_vi (call)->next;
444 }
445 
446 
/* The three kinds of constraint expression (see the overview at the
   top of the file): plain variable, dereference, and address-of.  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  An index into
     varmap (see get_varinfo).  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.   */
  HOST_WIDE_INT offset;
};
466 
/* Use 0x8000... (the most negative HOST_WIDE_INT) as special unknown
   offset.  Computed by shifting an unsigned value: left-shifting a
   negative value is undefined behavior in C/C++.  */
#define UNKNOWN_OFFSET \
  ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
469 
/* Shorthand used for vectors of constraint expressions.  */
typedef struct constraint_expr ce_s;

/* Constraint-building routines defined later in this file.  */
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
475 
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;

/* Allocation pool for struct constraint objects.  */
static alloc_pool constraint_pool;
492 
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map (e.g. it includes the REF nodes used
     during variable substitution — see FIRST_REF_NODE below).  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};
566 
/* The single constraint graph being built and solved.  */
static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  REF node i + FIRST_REF_NODE corresponds to variable i.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
575 
576 /* Return the representative node for NODE, if NODE has been unioned
577    with another NODE.
578    This function performs path compression along the way to finding
579    the representative.  */
580 
581 static unsigned int
582 find (unsigned int node)
583 {
584   gcc_assert (node < graph->size);
585   if (graph->rep[node] != node)
586     return graph->rep[node] = find (graph->rep[node]);
587   return node;
588 }
589 
590 /* Union the TO and FROM nodes to the TO nodes.
591    Note that at some point in the future, we may want to do
592    union-by-rank, in which case we are going to have to return the
593    node we unified to.  */
594 
595 static bool
596 unite (unsigned int to, unsigned int from)
597 {
598   gcc_assert (to < graph->size && from < graph->size);
599   if (to != from && graph->rep[from] != to)
600     {
601       graph->rep[from] = to;
602       return true;
603     }
604   return false;
605 }
606 
607 /* Create a new constraint consisting of LHS and RHS expressions.  */
608 
609 static constraint_t
610 new_constraint (const struct constraint_expr lhs,
611 		const struct constraint_expr rhs)
612 {
613   constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
614   ret->lhs = lhs;
615   ret->rhs = rhs;
616   return ret;
617 }
618 
619 /* Print out constraint C to FILE.  */
620 
621 static void
622 dump_constraint (FILE *file, constraint_t c)
623 {
624   if (c->lhs.type == ADDRESSOF)
625     fprintf (file, "&");
626   else if (c->lhs.type == DEREF)
627     fprintf (file, "*");
628   fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
629   if (c->lhs.offset == UNKNOWN_OFFSET)
630     fprintf (file, " + UNKNOWN");
631   else if (c->lhs.offset != 0)
632     fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
633   fprintf (file, " = ");
634   if (c->rhs.type == ADDRESSOF)
635     fprintf (file, "&");
636   else if (c->rhs.type == DEREF)
637     fprintf (file, "*");
638   fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
639   if (c->rhs.offset == UNKNOWN_OFFSET)
640     fprintf (file, " + UNKNOWN");
641   else if (c->rhs.offset != 0)
642     fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
643 }
644 
645 
/* Declarations of the debug variants below; external so they can be
   called from the debugger.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
651 
652 /* Print out constraint C to stderr.  */
653 
654 DEBUG_FUNCTION void
655 debug_constraint (constraint_t c)
656 {
657   dump_constraint (stderr, c);
658   fprintf (stderr, "\n");
659 }
660 
661 /* Print out all constraints to FILE */
662 
663 static void
664 dump_constraints (FILE *file, int from)
665 {
666   int i;
667   constraint_t c;
668   for (i = from; constraints.iterate (i, &c); i++)
669     if (c)
670       {
671 	dump_constraint (file, c);
672 	fprintf (file, "\n");
673       }
674 }
675 
/* Print out all constraints to stderr.  Debug variant, intended to
   be called from the debugger.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
683 
/* Print the constraint graph in dot format.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 0; i < graph->size; i++)
    {
      /* Only representatives are printed; unified nodes are subsumed
	 by their representative.  */
      if (find (i) != i)
	continue;
      /* Nodes at or above FIRST_REF_NODE stand for dereferences of
	 the corresponding variable and are printed as "*name".  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 0; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges after unification carry no information.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
756 
/* Print out the constraint graph to stderr.  Debug variant, intended
   to be called from the debugger.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
764 
765 /* SOLVER FUNCTIONS
766 
767    The solver is a simple worklist solver, that works on the following
768    algorithm:
769 
770    sbitmap changed_nodes = all zeroes;
771    changed_count = 0;
772    For each node that is not already collapsed:
773        changed_count++;
774        set bit in changed nodes
775 
776    while (changed_count > 0)
777    {
778      compute topological ordering for constraint graph
779 
780      find and collapse cycles in the constraint graph (updating
781      changed if necessary)
782 
783      for each node (n) in the graph in topological order:
784        changed_count--;
785 
786        Process each complex constraint associated with the node,
787        updating changed if necessary.
788 
789        For each outgoing edge from n, propagate the solution from n to
790        the destination of the edge, updating changed as necessary.
791 
792    }  */
793 
794 /* Return true if two constraint expressions A and B are equal.  */
795 
796 static bool
797 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
798 {
799   return a.type == b.type && a.var == b.var && a.offset == b.offset;
800 }
801 
802 /* Return true if constraint expression A is less than constraint expression
803    B.  This is just arbitrary, but consistent, in order to give them an
804    ordering.  */
805 
806 static bool
807 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
808 {
809   if (a.type == b.type)
810     {
811       if (a.var == b.var)
812 	return a.offset < b.offset;
813       else
814 	return a.var < b.var;
815     }
816   else
817     return a.type < b.type;
818 }
819 
820 /* Return true if constraint A is less than constraint B.  This is just
821    arbitrary, but consistent, in order to give them an ordering.  */
822 
823 static bool
824 constraint_less (const constraint_t &a, const constraint_t &b)
825 {
826   if (constraint_expr_less (a->lhs, b->lhs))
827     return true;
828   else if (constraint_expr_less (b->lhs, a->lhs))
829     return false;
830   else
831     return constraint_expr_less (a->rhs, b->rhs);
832 }
833 
834 /* Return true if two constraints A and B are equal.  */
835 
836 static bool
837 constraint_equal (struct constraint a, struct constraint b)
838 {
839   return constraint_expr_equal (a.lhs, b.lhs)
840     && constraint_expr_equal (a.rhs, b.rhs);
841 }
842 
843 
844 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
845 
846 static constraint_t
847 constraint_vec_find (vec<constraint_t> vec,
848 		     struct constraint lookfor)
849 {
850   unsigned int place;
851   constraint_t found;
852 
853   if (!vec.exists ())
854     return NULL;
855 
856   place = vec.lower_bound (&lookfor, constraint_less);
857   if (place >= vec.length ())
858     return NULL;
859   found = vec[place];
860   if (!constraint_equal (*found, lookfor))
861     return NULL;
862   return found;
863 }
864 
865 /* Union two constraint vectors, TO and FROM.  Put the result in TO.  */
866 
867 static void
868 constraint_set_union (vec<constraint_t> *to,
869 		      vec<constraint_t> *from)
870 {
871   int i;
872   constraint_t c;
873 
874   FOR_EACH_VEC_ELT (*from, i, c)
875     {
876       if (constraint_vec_find (*to, *c) == NULL)
877 	{
878 	  unsigned int place = to->lower_bound (c, constraint_less);
879 	  to->safe_insert (place, c);
880 	}
881     }
882 }
883 
884 /* Expands the solution in SET to all sub-fields of variables included.
885    Union the expanded result into RESULT.  */
886 
887 static void
888 solution_set_expand (bitmap result, bitmap set)
889 {
890   bitmap_iterator bi;
891   bitmap vars = NULL;
892   unsigned j;
893 
894   /* In a first pass record all variables we need to add all
895      sub-fields off.  This avoids quadratic behavior.  */
896   EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
897     {
898       varinfo_t v = get_varinfo (j);
899       if (v->is_artificial_var
900 	  || v->is_full_var)
901 	continue;
902       v = lookup_vi_for_tree (v->decl);
903       if (vars == NULL)
904 	vars = BITMAP_ALLOC (NULL);
905       bitmap_set_bit (vars, v->id);
906     }
907 
908   /* In the second pass now do the addition to the solution and
909      to speed up solving add it to the delta as well.  */
910   if (vars != NULL)
911     {
912       EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
913 	{
914 	  varinfo_t v = get_varinfo (j);
915 	  for (; v != NULL; v = v->next)
916 	    bitmap_set_bit (result, v->id);
917 	}
918       BITMAP_FREE (vars);
919     }
920 }
921 
922 /* Take a solution set SET, add OFFSET to each member of the set, and
923    overwrite SET with the result when done.  */
924 
925 static void
926 solution_set_add (bitmap set, HOST_WIDE_INT offset)
927 {
928   bitmap result = BITMAP_ALLOC (&iteration_obstack);
929   unsigned int i;
930   bitmap_iterator bi;
931 
932   /* If the offset is unknown we have to expand the solution to
933      all subfields.  */
934   if (offset == UNKNOWN_OFFSET)
935     {
936       solution_set_expand (set, set);
937       return;
938     }
939 
940   EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
941     {
942       varinfo_t vi = get_varinfo (i);
943 
944       /* If this is a variable with just one field just set its bit
945          in the result.  */
946       if (vi->is_artificial_var
947 	  || vi->is_unknown_size_var
948 	  || vi->is_full_var)
949 	bitmap_set_bit (result, i);
950       else
951 	{
952 	  HOST_WIDE_INT fieldoffset = vi->offset + offset;
953 	  unsigned HOST_WIDE_INT size = vi->size;
954 
955 	  /* If the offset makes the pointer point to before the
956 	     variable use offset zero for the field lookup.  */
957 	  if (fieldoffset < 0)
958 	    vi = lookup_vi_for_tree (vi->decl);
959 	  else
960 	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
961 
962 	  do
963 	    {
964 	      bitmap_set_bit (result, vi->id);
965 	      if (!vi->next)
966 		break;
967 
968 	      /* We have to include all fields that overlap the current field
969 		 shifted by offset.  */
970 	      vi = vi->next;
971 	    }
972 	  while (vi->offset < fieldoffset + size);
973 	}
974     }
975 
976   bitmap_copy (set, result);
977   BITMAP_FREE (result);
978 }
979 
980 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
981    process.  */
982 
983 static bool
984 set_union_with_increment  (bitmap to, bitmap from, HOST_WIDE_INT inc)
985 {
986   if (inc == 0)
987     return bitmap_ior_into (to, from);
988   else
989     {
990       bitmap tmp;
991       bool res;
992 
993       tmp = BITMAP_ALLOC (&iteration_obstack);
994       bitmap_copy (tmp, from);
995       solution_set_add (tmp, inc);
996       res = bitmap_ior_into (to, tmp);
997       BITMAP_FREE (tmp);
998       return res;
999     }
1000 }
1001 
1002 /* Insert constraint C into the list of complex constraints for graph
1003    node VAR.  */
1004 
1005 static void
1006 insert_into_complex (constraint_graph_t graph,
1007 		     unsigned int var, constraint_t c)
1008 {
1009   vec<constraint_t> complex = graph->complex[var];
1010   unsigned int place = complex.lower_bound (c, constraint_less);
1011 
1012   /* Only insert constraints that do not already exist.  */
1013   if (place >= complex.length ()
1014       || !constraint_equal (*c, *complex[place]))
1015     graph->complex[var].safe_insert (place, c);
1016 }
1017 
1018 
/* Condense two variable nodes into a single variable node, by moving
   all associated info from SRC to TO.  TO must already be the
   representative of FROM in the union-find structure.  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;

  gcc_assert (find (from) == to);

  /* Move all complex constraints from src node into to node  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node src, we may have either
	 a = *src, and *src = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      /* Retarget the side of the constraint that referred to FROM.
	 For a plain offseted constraint that is the rhs.  */
      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  /* Merge the (now retargeted) complex lists and drop FROM's copy.  */
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  graph->complex[from].release ();
}
1048 
1049 
1050 /* Remove edges involving NODE from GRAPH.  */
1051 
1052 static void
1053 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1054 {
1055   if (graph->succs[node])
1056     BITMAP_FREE (graph->succs[node]);
1057 }
1058 
1059 /* Merge GRAPH nodes FROM and TO into node TO.  */
1060 
1061 static void
1062 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1063 		   unsigned int from)
1064 {
1065   if (graph->indirect_cycles[from] != -1)
1066     {
1067       /* If we have indirect cycles with the from node, and we have
1068 	 none on the to node, the to node has indirect cycles from the
1069 	 from node now that they are unified.
1070 	 If indirect cycles exist on both, unify the nodes that they
1071 	 are in a cycle with, since we know they are in a cycle with
1072 	 each other.  */
1073       if (graph->indirect_cycles[to] == -1)
1074 	graph->indirect_cycles[to] = graph->indirect_cycles[from];
1075     }
1076 
1077   /* Merge all the successor edges.  */
1078   if (graph->succs[from])
1079     {
1080       if (!graph->succs[to])
1081 	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1082       bitmap_ior_into (graph->succs[to],
1083 		       graph->succs[from]);
1084     }
1085 
1086   clear_edges_for_node (graph, from);
1087 }
1088 
1089 
1090 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1091    it doesn't exist in the graph already.  */
1092 
1093 static void
1094 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1095 			 unsigned int from)
1096 {
1097   if (to == from)
1098     return;
1099 
1100   if (!graph->implicit_preds[to])
1101     graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1102 
1103   if (bitmap_set_bit (graph->implicit_preds[to], from))
1104     stats.num_implicit_edges++;
1105 }
1106 
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1119 
1120 /* Add a graph edge to GRAPH, going from FROM to TO if
1121    it doesn't exist in the graph already.
1122    Return false if the edge already existed, true otherwise.  */
1123 
1124 static bool
1125 add_graph_edge (constraint_graph_t graph, unsigned int to,
1126 		unsigned int from)
1127 {
1128   if (to == from)
1129     {
1130       return false;
1131     }
1132   else
1133     {
1134       bool r = false;
1135 
1136       if (!graph->succs[from])
1137 	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1138       if (bitmap_set_bit (graph->succs[from], to))
1139 	{
1140 	  r = true;
1141 	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1142 	    stats.num_edges++;
1143 	}
1144       return r;
1145     }
1146 }
1147 
1148 
1149 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH.  */
1150 
1151 static bool
1152 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1153 		  unsigned int dest)
1154 {
1155   return (graph->succs[dest]
1156 	  && bitmap_bit_p (graph->succs[dest], src));
1157 }
1158 
1159 /* Initialize the constraint graph structure to contain SIZE nodes.  */
1160 
1161 static void
1162 init_graph (unsigned int size)
1163 {
1164   unsigned int j;
1165 
1166   graph = XCNEW (struct constraint_graph);
1167   graph->size = size;
1168   graph->succs = XCNEWVEC (bitmap, graph->size);
1169   graph->indirect_cycles = XNEWVEC (int, graph->size);
1170   graph->rep = XNEWVEC (unsigned int, graph->size);
1171   /* ??? Macros do not support template types with multiple arguments,
1172      so we use a typedef to work around it.  */
1173   typedef vec<constraint_t> vec_constraint_t_heap;
1174   graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1175   graph->pe = XCNEWVEC (unsigned int, graph->size);
1176   graph->pe_rep = XNEWVEC (int, graph->size);
1177 
1178   for (j = 0; j < graph->size; j++)
1179     {
1180       graph->rep[j] = j;
1181       graph->pe_rep[j] = -1;
1182       graph->indirect_cycles[j] = -1;
1183     }
1184 }
1185 
/* Build the constraint graph, adding only predecessor edges right now.
   This graph (together with the implicit-predecessor and points-to
   side tables built here) feeds the offline variable substitution
   phase.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the auxiliary per-node data used by the offline phase.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special non-REF nodes start out direct; the constraint
     walk below demotes them as needed.  */
  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor/implicit edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  For a
	     field of a larger decl that means every sibling field.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
          v = get_varinfo (rhsvar);
          if (!v->is_full_var)
            {
              v = lookup_vi_for_tree (v->decl);
              do
                {
                  bitmap_clear_bit (graph->direct_nodes, v->id);
                  v = v->next;
                }
              while (v != NULL);
            }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offseted copies make the offseted side field-sensitive and
	     hence not representable as a direct node.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1288 
/* Build the constraint graph, adding successor edges.  Run after
   offline substitution, so variables are mapped through find ().  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been removed (NULLed) by rewriting.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Use the representatives after unification.  */
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y: edge from y to the REF node of x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: edge from the REF node of y to x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: seed the solution of x directly, no edge needed.  */
	  gcc_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y: plain copy edge.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1348 
1349 
1350 /* Changed variables on the last iteration.  */
1351 static bitmap changed;
1352 
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already visited by the DFS.  */
  sbitmap visited;
  /* Nodes whose SCC has already been collapsed.  */
  sbitmap deleted;
  /* DFS discovery number for each node.  */
  unsigned int *dfs;
  /* Node representative mapping, used during condensation
     (initialized to the identity).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of candidate SCC members; Nuutila's variant keeps only
     non-root nodes here.  */
  vec<unsigned> scc_stack;
};
1364 
1365 
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      /* Skip nodes whose SCC is already finished.  */
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);
      {
	/* Propagate the lowest reachable DFS number up to N.  */
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified: N is an SCC root iff
     its DFS number was not lowered by any successor.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop every stack entry belonging to this SCC.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* The lowest-numbered member becomes the representative; it
	     must be a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* Not a root: leave N on the stack for its root to collect.  */
    si->scc_stack.safe_push (n);
}
1456 
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   union-find representative.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{

  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move edges and complex constraints over to TO.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_bit_p (changed, from))
    {
      bitmap_clear_bit (changed, from);
      bitmap_set_bit (changed, to);
    }
  if (get_varinfo (from)->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      if (bitmap_ior_into (get_varinfo (to)->solution,
			   get_varinfo (from)->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solutions are no longer needed after the merge.  */
      BITMAP_FREE (get_varinfo (from)->solution);
      if (get_varinfo (from)->oldsolution)
	BITMAP_FREE (get_varinfo (from)->oldsolution);

      /* Invalidate TO's old solution once iteration has started so the
	 next comparison against it is recomputed from scratch.  */
      if (stats.iterations > 0
	  && get_varinfo (to)->oldsolution)
	BITMAP_FREE (get_varinfo (to)->oldsolution);
    }
  /* Remove any self edge the merge may have created.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
1513 
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of nodes already visited by topo_visit.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse* (filled post-order by topo_visit).  */
  vec<unsigned> topo_order;
};
1524 
1525 
1526 /* Initialize and return a topological info structure.  */
1527 
1528 static struct topo_info *
1529 init_topo_info (void)
1530 {
1531   size_t size = graph->size;
1532   struct topo_info *ti = XNEW (struct topo_info);
1533   ti->visited = sbitmap_alloc (size);
1534   bitmap_clear (ti->visited);
1535   ti->topo_order.create (1);
1536   return ti;
1537 }
1538 
1539 
1540 /* Free the topological sort info pointed to by TI.  */
1541 
1542 static void
1543 free_topo_info (struct topo_info *ti)
1544 {
1545   sbitmap_free (ti->visited);
1546   ti->topo_order.release ();
1547   free (ti);
1548 }
1549 
1550 /* Visit the graph in topological order, and store the order in the
1551    topo_info structure.  */
1552 
1553 static void
1554 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1555 	    unsigned int n)
1556 {
1557   bitmap_iterator bi;
1558   unsigned int j;
1559 
1560   bitmap_set_bit (ti->visited, n);
1561 
1562   if (graph->succs[n])
1563     EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1564       {
1565 	if (!bitmap_bit_p (ti->visited, j))
1566 	  topo_visit (graph, ti, j);
1567       }
1568 
1569   ti->topo_order.safe_push (n);
1570 }
1571 
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  /* Whether the solution of LHS grew.  */
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* A negative field offset points before the variable; fall
	     back to its first field.  */
	  if (fieldoffset < 0)
	    v = lookup_vi_for_tree (v->decl);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == NULL)
	    break;

	  v = v->next;
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1661 
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  /* Whether RHS has already been recorded as escaping.  */
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* A negative field offset points before the variable; fall
	     back to its first field.  */
	  if (fieldoffset < 0)
	    v = lookup_vi_for_tree (v->decl);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == NULL)
	    break;

	  v = v->next;
	}
      while (v->offset < fieldoffset + size);
    }
}
1761 
1762 /* Handle a non-simple (simple meaning requires no iteration),
1763    constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).  */
1764 
1765 static void
1766 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1767 {
1768   if (c->lhs.type == DEREF)
1769     {
1770       if (c->rhs.type == ADDRESSOF)
1771 	{
1772 	  gcc_unreachable();
1773 	}
1774       else
1775 	{
1776 	  /* *x = y */
1777 	  do_ds_constraint (c, delta);
1778 	}
1779     }
1780   else if (c->rhs.type == DEREF)
1781     {
1782       /* x = *y */
1783       if (!(get_varinfo (c->lhs.var)->is_special_var))
1784 	do_sd_constraint (graph, c, delta);
1785     }
1786   else
1787     {
1788       bitmap tmp;
1789       bitmap solution;
1790       bool flag = false;
1791 
1792       gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1793       solution = get_varinfo (c->rhs.var)->solution;
1794       tmp = get_varinfo (c->lhs.var)->solution;
1795 
1796       flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1797 
1798       if (flag)
1799 	{
1800 	  get_varinfo (c->lhs.var)->solution = tmp;
1801 	  bitmap_set_bit (changed, c->lhs.var);
1802 	}
1803     }
1804 }
1805 
1806 /* Initialize and return a new SCC info structure.  */
1807 
1808 static struct scc_info *
1809 init_scc_info (size_t size)
1810 {
1811   struct scc_info *si = XNEW (struct scc_info);
1812   size_t i;
1813 
1814   si->current_index = 0;
1815   si->visited = sbitmap_alloc (size);
1816   bitmap_clear (si->visited);
1817   si->deleted = sbitmap_alloc (size);
1818   bitmap_clear (si->deleted);
1819   si->node_mapping = XNEWVEC (unsigned int, size);
1820   si->dfs = XCNEWVEC (unsigned int, size);
1821 
1822   for (i = 0; i < size; i++)
1823     si->node_mapping[i] = i;
1824 
1825   si->scc_stack.create (1);
1826   return si;
1827 }
1828 
1829 /* Free an SCC info structure pointed to by SI */
1830 
1831 static void
1832 free_scc_info (struct scc_info *si)
1833 {
1834   sbitmap_free (si->visited);
1835   sbitmap_free (si->deleted);
1836   free (si->node_mapping);
1837   free (si->dfs);
1838   si->scc_stack.release ();
1839   free (si);
1840 }
1841 
1842 
1843 /* Find indirect cycles in GRAPH that occur, using strongly connected
1844    components, and note them in the indirect cycles map.
1845 
1846    This technique comes from Ben Hardekopf and Calvin Lin,
1847    "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1848    Lines of Code", submitted to PLDI 2007.  */
1849 
1850 static void
1851 find_indirect_cycles (constraint_graph_t graph)
1852 {
1853   unsigned int i;
1854   unsigned int size = graph->size;
1855   struct scc_info *si = init_scc_info (size);
1856 
1857   for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1858     if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1859       scc_visit (graph, si, i);
1860 
1861   free_scc_info (si);
1862 }
1863 
1864 /* Compute a topological ordering for GRAPH, and store the result in the
1865    topo_info structure TI.  */
1866 
1867 static void
1868 compute_topo_order (constraint_graph_t graph,
1869 		    struct topo_info *ti)
1870 {
1871   unsigned int i;
1872   unsigned int size = graph->size;
1873 
1874   for (i = 0; i != size; ++i)
1875     if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1876       topo_visit (graph, ti, i);
1877 }
1878 
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Precomputed hash of LABELS, used as the table hash.  */
  hashval_t hashcode;
  /* Equivalence class id assigned to this label set; 0 until the
     caller of equiv_class_lookup_or_add fills it in.  */
  unsigned int equivalence_class;
  /* The set of labels this entry represents.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1889 
1890 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1891    classes.  */
1892 static htab_t pointer_equiv_class_table;
1893 
1894 /* A hashtable for mapping a bitmap of labels->location equivalence
1895    classes.  */
1896 static htab_t location_equiv_class_table;
1897 
1898 /* Hash function for a equiv_class_label_t */
1899 
1900 static hashval_t
1901 equiv_class_label_hash (const void *p)
1902 {
1903   const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1904   return ecl->hashcode;
1905 }
1906 
1907 /* Equality function for two equiv_class_label_t's.  */
1908 
1909 static int
1910 equiv_class_label_eq (const void *p1, const void *p2)
1911 {
1912   const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1913   const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1914   return (eql1->hashcode == eql2->hashcode
1915 	  && bitmap_equal_p (eql1->labels, eql2->labels));
1916 }
1917 
/* Lookup the entry in TABLE for the bitmap of LABELS.  If none exists
   yet, insert a fresh entry for LABELS with equivalence class 0 (the
   caller is expected to assign the real class) and return it.  The
   entry stores the LABELS pointer itself; no copy is made.  */

static equiv_class_label *
equiv_class_lookup_or_add (htab_t table, bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = (equiv_class_label **) htab_find_slot_with_hash (table, &ecl,
							  ecl.hashcode, INSERT);
  if (!*slot)
    {
      /* First time this label set is seen: create its entry.  */
      *slot = XNEW (struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1942 
1943 /* Perform offline variable substitution.
1944 
1945    This is a worst case quadratic time way of identifying variables
1946    that must have equivalent points-to sets, including those caused by
1947    static cycles, and single entry subgraphs, in the constraint graph.
1948 
1949    The technique is described in "Exploiting Pointer and Location
1950    Equivalence to Optimize Pointer Analysis. In the 14th International
1951    Static Analysis Symposium (SAS), August 2007."  It is known as the
1952    "HU" algorithm, and is equivalent to value numbering the collapsed
1953    constraint graph including evaluating unions.
1954 
1955    The general method of finding equivalence classes is as follows:
1956    Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1957    Initialize all non-REF nodes to be direct nodes.
1958    For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1959    variable}
1960    For each constraint containing the dereference, we also do the same
1961    thing.
1962 
1963    We then compute SCC's in the graph and unify nodes in the same SCC,
1964    including pts sets.
1965 
1966    For each non-collapsed node x:
1967     Visit all unvisited explicit incoming edges.
1968     Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1969     where y->x.
1970     Lookup the equivalence class for pts(x).
1971      If we found one, equivalence_class(x) = found class.
1972      Otherwise, equivalence_class(x) = new class, and new_class is
1973     added to the lookup table.
1974 
1975    All direct nodes with the same equivalence class can be replaced
1976    with a single representative node.
1977    All unlabeled nodes (label == 0) are not pointers and all edges
1978    involving them can be eliminated.
1979    We perform these optimizations during rewrite_constraints
1980 
1981    In addition to pointer equivalence class finding, we also perform
1982    location equivalence class finding.  This is the set of variables
1983    that always appear together in points-to sets.  We use this to
1984    compress the size of the points-to sets.  */
1985 
1986 /* Current maximum pointer equivalence class id.  */
1987 static int pointer_equiv_class;
1988 
1989 /* Current maximum location equivalence class id.  */
1990 static int location_equiv_class;
1991 
1992 /* Recursive routine to find strongly connected components in GRAPH,
1993    and label its nodes with DFS numbers.  */
1994 
1995 static void
1996 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1997 {
1998   unsigned int i;
1999   bitmap_iterator bi;
2000   unsigned int my_dfs;
2001 
2002   /* N must be its own representative when we first visit it.  */
2003   gcc_assert (si->node_mapping[n] == n);
2004   bitmap_set_bit (si->visited, n);
2005   /* Assign N its DFS number; MY_DFS remembers it so we can detect
2006      below whether N is the root of an SCC.  */
2006   si->dfs[n] = si->current_index ++;
2007   my_dfs = si->dfs[n];
2008 
2009   /* Visit all nodes reachable through explicit predecessor edges
2010      (the traversal edges of this condensation).  */
2010   EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2011     {
2012       unsigned int w = si->node_mapping[i];
2013 
2014       /* Skip nodes already assigned to a completed component.  */
2015       if (bitmap_bit_p (si->deleted, w))
2016 	continue;
2017 
2018       if (!bitmap_bit_p (si->visited, w))
2019 	condense_visit (graph, si, w);
2020       {
2021 	/* Re-read the mapping: the recursive call may have collapsed W.  */
2022 	unsigned int t = si->node_mapping[w];
2023 	unsigned int nnode = si->node_mapping[n];
2024 	gcc_assert (nnode == n);
2025 
2026 	/* Propagate the smallest reachable DFS number (Tarjan lowlink).  */
2027 	if (si->dfs[t] < si->dfs[nnode])
2028 	  si->dfs[n] = si->dfs[t];
2029       }
2030     }
2031 
2032   /* Visit all the implicit predecessors, same treatment as above.  */
2033   EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2034     {
2035       unsigned int w = si->node_mapping[i];
2036 
2037       if (bitmap_bit_p (si->deleted, w))
2038 	continue;
2039 
2040       if (!bitmap_bit_p (si->visited, w))
2041 	condense_visit (graph, si, w);
2042       {
2043 	unsigned int t = si->node_mapping[w];
2044 	unsigned int nnode = si->node_mapping[n];
2045 	gcc_assert (nnode == n);
2046 
2047 	if (si->dfs[t] < si->dfs[nnode])
2048 	  si->dfs[n] = si->dfs[t];
2049       }
2050     }
2051 
2052   /* See if any components have been identified: N is an SCC root iff
2053      its DFS number was not lowered by anything it reaches.  */
2054   if (si->dfs[n] == my_dfs)
2055     {
2056       /* Pop every stacked node belonging to this SCC and collapse it
2057 	 into N, merging its edge and points-to bitmaps.  */
2058       while (si->scc_stack.length () != 0
2059 	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
2060 	{
2061 	  unsigned int w = si->scc_stack.pop ();
2062 	  si->node_mapping[w] = n;
2063 
2064 	  /* The merged node is only direct if every member was.  */
2065 	  if (!bitmap_bit_p (graph->direct_nodes, w))
2066 	    bitmap_clear_bit (graph->direct_nodes, n);
2067 
2068 	  /* Unify our nodes.  */
2069 	  if (graph->preds[w])
2070 	    {
2071 	      if (!graph->preds[n])
2072 		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2073 	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
2074 	    }
2075 	  if (graph->implicit_preds[w])
2076 	    {
2077 	      if (!graph->implicit_preds[n])
2078 		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2079 	      bitmap_ior_into (graph->implicit_preds[n],
2080 			       graph->implicit_preds[w]);
2081 	    }
2082 	  if (graph->points_to[w])
2083 	    {
2084 	      if (!graph->points_to[n])
2085 		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2086 	      bitmap_ior_into (graph->points_to[n],
2087 			       graph->points_to[w]);
2088 	    }
2089 	}
2090       /* N's component is finished; never visit it again.  */
2091       bitmap_set_bit (si->deleted, n);
2092     }
2093   else
2094     si->scc_stack.safe_push (n);
2095 }
2086 
2087 /* Label pointer equivalences.  */
2088 
2089 static void
2090 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2091 {
2092   unsigned int i, first_pred;
2093   bitmap_iterator bi;
2094 
2095   bitmap_set_bit (si->visited, n);
2096 
2097   /* Label and union our incoming edges' points-to sets.
2098      FIRST_PRED remembers the first predecessor with a non-empty
2099      points-to set so we can share its bitmap instead of copying it
2100      when it turns out to be the only one.  */
2098   first_pred = -1U;
2099   EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2100     {
2101       unsigned int w = si->node_mapping[i];
2102       if (!bitmap_bit_p (si->visited, w))
2103 	label_visit (graph, si, w);
2104 
2105       /* Skip self-edges and edges from non-pointer (label 0) nodes.  */
2106       if (w == n || graph->pointer_label[w] == 0)
2107 	continue;
2108 
2109       if (graph->points_to[w])
2110 	{
2111 	  if (!graph->points_to[n])
2112 	    {
2113 	      /* Delay allocating N's own bitmap until a second
2114 		 non-empty predecessor shows up.  */
2113 	      if (first_pred == -1U)
2114 		first_pred = w;
2115 	      else
2116 		{
2117 		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2118 		  bitmap_ior (graph->points_to[n],
2119 			      graph->points_to[first_pred],
2120 			      graph->points_to[w]);
2121 		}
2122 	    }
2123 	  else
2124 	    bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2125 	}
2126     }
2127 
2128   /* Indirect nodes get fresh variables and a new pointer equiv class.  */
2129   if (!bitmap_bit_p (graph->direct_nodes, n))
2130     {
2131       if (!graph->points_to[n])
2132 	{
2133 	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2134 	  if (first_pred != -1U)
2135 	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2136 	}
2137       /* The fresh variable for N is represented by its REF node bit.  */
2137       bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2138       graph->pointer_label[n] = pointer_equiv_class++;
2139       equiv_class_label_t ecl;
2140       ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2141 				       graph->points_to[n]);
2142       ecl->equivalence_class = graph->pointer_label[n];
2143       return;
2144     }
2145 
2146   /* If there was only a single non-empty predecessor the pointer equiv
2147      class is the same.  */
2148   if (!graph->points_to[n])
2149     {
2150       if (first_pred != -1U)
2151 	{
2152 	  graph->pointer_label[n] = graph->pointer_label[first_pred];
2153 	  /* Share the predecessor's bitmap rather than copying it.  */
2153 	  graph->points_to[n] = graph->points_to[first_pred];
2154 	}
2155       return;
2156     }
2157 
2158   /* Otherwise hash the merged points-to set to find (or create) the
2159      equivalence class; identical sets share one class and bitmap.  */
2158   if (!bitmap_empty_p (graph->points_to[n]))
2159     {
2160       equiv_class_label_t ecl;
2161       ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2162 				       graph->points_to[n]);
2163       if (ecl->equivalence_class == 0)
2164 	ecl->equivalence_class = pointer_equiv_class++;
2165       else
2166 	{
2167 	  /* Existing class: drop our bitmap, reuse the shared one.  */
2167 	  BITMAP_FREE (graph->points_to[n]);
2168 	  graph->points_to[n] = ecl->labels;
2169 	}
2170       graph->pointer_label[n] = ecl->equivalence_class;
2171     }
2172 }
2173 
2174 /* Perform offline variable substitution, discovering equivalence
2175    classes, and eliminating non-pointer variables.  */
2176 
2177 static struct scc_info *
2178 perform_var_substitution (constraint_graph_t graph)
2179 {
2180   unsigned int i;
2181   unsigned int size = graph->size;
2182   struct scc_info *si = init_scc_info (size);
2183 
2184   bitmap_obstack_initialize (&iteration_obstack);
2185   pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2186 					   equiv_class_label_eq, free);
2187   location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2188 					    equiv_class_label_eq, free);
2189   /* Class 0 is reserved for "not a pointer" / "no location class".  */
2189   pointer_equiv_class = 1;
2190   location_equiv_class = 1;
2191 
2192   /* Condense the nodes, which means to find SCC's, count incoming
2193      predecessors, and unite nodes in SCC's.  */
2194   for (i = 0; i < FIRST_REF_NODE; i++)
2195     if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2196       condense_visit (graph, si, si->node_mapping[i]);
2197 
2198   bitmap_clear (si->visited);
2199   /* Actually label the nodes for pointer equivalences.  */
2200   for (i = 0; i < FIRST_REF_NODE; i++)
2201     if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2202       label_visit (graph, si, si->node_mapping[i]);
2203 
2204   /* Calculate location equivalence labels.  */
2205   for (i = 0; i < FIRST_REF_NODE; i++)
2206     {
2207       bitmap pointed_by;
2208       bitmap_iterator bi;
2209       unsigned int j;
2210 
2211       if (!graph->pointed_by[i])
2212 	continue;
2213       pointed_by = BITMAP_ALLOC (&iteration_obstack);
2214 
2215       /* Translate the pointed-by mapping for pointer equivalence
2216 	 labels.  */
2217       EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2218 	{
2219 	  bitmap_set_bit (pointed_by,
2220 			  graph->pointer_label[si->node_mapping[j]]);
2221 	}
2222       /* The original pointed_by is now dead.  */
2223       BITMAP_FREE (graph->pointed_by[i]);
2224 
2225       /* Look up the location equivalence label if one exists, or make
2226 	 one otherwise.  */
2227       equiv_class_label_t ecl;
2228       ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2229       if (ecl->equivalence_class == 0)
2230 	ecl->equivalence_class = location_equiv_class++;
2231       else
2232 	{
2233 	  if (dump_file && (dump_flags & TDF_DETAILS))
2234 	    fprintf (dump_file, "Found location equivalence for node %s\n",
2235 		     get_varinfo (i)->name);
2236 	  /* An equal set already exists; POINTED_BY is redundant.  */
2236 	  BITMAP_FREE (pointed_by);
2237 	}
2238       graph->loc_label[i] = ecl->equivalence_class;
2239 
2240     }
2241 
2242   /* Dump the SCC mapping and the computed equivalence classes.  */
2242   if (dump_file && (dump_flags & TDF_DETAILS))
2243     for (i = 0; i < FIRST_REF_NODE; i++)
2244       {
2245 	unsigned j = si->node_mapping[i];
2246 	if (j != i)
2247 	  {
2248 	    fprintf (dump_file, "%s node id %d ",
2249 		     bitmap_bit_p (graph->direct_nodes, i)
2250 		     ? "Direct" : "Indirect", i);
2251 	    if (i < FIRST_REF_NODE)
2252 	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2253 	    else
2254 	      fprintf (dump_file, "\"*%s\"",
2255 		       get_varinfo (i - FIRST_REF_NODE)->name);
2256 	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2257 	    if (j < FIRST_REF_NODE)
2258 	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2259 	    else
2260 	      fprintf (dump_file, "\"*%s\"\n",
2261 		       get_varinfo (j - FIRST_REF_NODE)->name);
2262 	  }
2263 	else
2264 	  {
2265 	    fprintf (dump_file,
2266 		     "Equivalence classes for %s node id %d ",
2267 		     bitmap_bit_p (graph->direct_nodes, i)
2268 		     ? "direct" : "indirect", i);
2269 	    if (i < FIRST_REF_NODE)
2270 	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2271 	    else
2272 	      fprintf (dump_file, "\"*%s\"",
2273 		       get_varinfo (i - FIRST_REF_NODE)->name);
2274 	    fprintf (dump_file,
2275 		     ": pointer %d, location %d\n",
2276 		     graph->pointer_label[i], graph->loc_label[i]);
2277 	  }
2278       }
2279 
2280   /* Quickly eliminate our non-pointer variables.  */
2281 
2282   for (i = 0; i < FIRST_REF_NODE; i++)
2283     {
2284       unsigned int node = si->node_mapping[i];
2285 
2286       /* Label 0 means the node was never labeled as a pointer.  */
2286       if (graph->pointer_label[node] == 0)
2287 	{
2288 	  if (dump_file && (dump_flags & TDF_DETAILS))
2289 	    fprintf (dump_file,
2290 		     "%s is a non-pointer variable, eliminating edges.\n",
2291 		     get_varinfo (node)->name);
2292 	  stats.nonpointer_vars++;
2293 	  clear_edges_for_node (graph, node);
2294 	}
2295     }
2296 
2297   /* Caller must release SI via free_var_substitution_info.  */
2297   return si;
2298 }
2299 
2300 /* Free information that was only necessary for variable
2301    substitution.  */
2302 
2303 static void
2304 free_var_substitution_info (struct scc_info *si)
2305 {
2306   free_scc_info (si);
2307   /* Release the per-node arrays used only during substitution.  */
2307   free (graph->pointer_label);
2308   free (graph->loc_label);
2309   free (graph->pointed_by);
2310   free (graph->points_to);
2311   free (graph->eq_rep);
2312   sbitmap_free (graph->direct_nodes);
2313   /* The tables own their entries (freed via the `free' callback
2314      registered at htab_create time).  */
2313   htab_delete (pointer_equiv_class_table);
2314   htab_delete (location_equiv_class_table);
2315   bitmap_obstack_release (&iteration_obstack);
2316 }
2317 
2318 /* Return an existing node that is equivalent to NODE, which has
2319    equivalence class LABEL, if one exists.  Return NODE otherwise.  */
2320 
2321 static unsigned int
2322 find_equivalent_node (constraint_graph_t graph,
2323 		      unsigned int node, unsigned int label)
2324 {
2325   /* When the address of NODE is never taken, any node with the same
2326      pointer-equivalence LABEL can stand in for it, so we collapse it
2327      into the label's representative.  Otherwise the pointers are
2328      equivalent but the locations are not, and the union is deferred
2329      to unite_pointer_equivalences.  */
2330 
2331   gcc_assert (label < graph->size);
2332 
2333   if (bitmap_bit_p (graph->address_taken, node))
2334     {
2335       /* Address-taken: just record the label for the later pass.  */
2336       graph->pe[node] = label;
2337       if (graph->pe_rep[label] == -1)
2338 	graph->pe_rep[label] = node;
2339       return node;
2340     }
2341 
2342   if (graph->eq_rep[label] == -1)
2343     {
2344       /* First node seen with this label becomes its representative.  */
2345       graph->eq_rep[label] = node;
2346       graph->pe_rep[label] = node;
2347       return node;
2348     }
2349 
2350   /* Unify NODE into the existing representative since we know they
2351      are equivalent.  */
2352   if (unite (graph->eq_rep[label], node))
2353     unify_nodes (graph, graph->eq_rep[label], node, false);
2354   return graph->eq_rep[label];
2355 }
2357 
2358 /* Unite pointer equivalent but not location equivalent nodes in
2359    GRAPH.  This may only be performed once variable substitution is
2360    finished.  */
2361 
2362 static void
2363 unite_pointer_equivalences (constraint_graph_t graph)
2364 {
2365   unsigned int node;
2366 
2367   /* Walk every node; each one carrying a pointer-equivalence label is
2368      merged into that label's representative unless already done.  */
2369   for (node = 0; node < FIRST_REF_NODE; node++)
2370     {
2371       unsigned int label = graph->pe[node];
2372       int rep;
2373 
2374       /* Label 0 means "not a pointer" -- nothing to unite.  */
2375       if (!label)
2376 	continue;
2377 
2378       rep = graph->pe_rep[label];
2379       if (rep == -1)
2380 	continue;
2381 
2382       rep = find (rep);
2383       if (rep >= 0 && unite (rep, find (node)))
2384 	unify_nodes (graph, rep, node, false);
2385     }
2386 }
2385 
2386 /* Move complex constraints to the GRAPH nodes they belong to.  */
2387 
2388 static void
2389 move_complex_constraints (constraint_graph_t graph)
2390 {
2391   int idx;
2392   constraint_t c;
2393 
2394   /* Attach each complex constraint to the graph node that must be
2395      re-examined whenever that node's solution changes.  */
2396   FOR_EACH_VEC_ELT (constraints, idx, c)
2397     {
2398       struct constraint_expr lhs, rhs;
2399 
2400       /* Slots may have been NULLed out by earlier rewriting.  */
2401       if (!c)
2402 	continue;
2403 
2404       lhs = c->lhs;
2405       rhs = c->rhs;
2406 
2407       if (lhs.type == DEREF)
2408 	/* *x = y goes with x.  */
2409 	insert_into_complex (graph, lhs.var, c);
2410       else if (rhs.type == DEREF)
2411 	{
2412 	  /* x = *y goes with y, unless x is a special variable.  */
2413 	  if (!(get_varinfo (lhs.var)->is_special_var))
2414 	    insert_into_complex (graph, rhs.var, c);
2415 	}
2416       else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2417 	       && (lhs.offset != 0 || rhs.offset != 0))
2418 	/* Offsetted copies also need per-node reprocessing.  */
2419 	insert_into_complex (graph, rhs.var, c);
2420     }
2421 }
2418 
2419 
2420 /* Optimize and rewrite complex constraints while performing
2421    collapsing of equivalent nodes.  SI is the SCC_INFO that is the
2422    result of perform_variable_substitution.  */
2423 
2424 static void
2425 rewrite_constraints (constraint_graph_t graph,
2426 		     struct scc_info *si)
2427 {
2428   int i;
2429   unsigned int j;
2430   constraint_t c;
2431 
2432   for (j = 0; j < graph->size; j++)
2433     gcc_assert (find (j) == j);
2434 
2435   FOR_EACH_VEC_ELT (constraints, i, c)
2436     {
2437       struct constraint_expr lhs = c->lhs;
2438       struct constraint_expr rhs = c->rhs;
2439       unsigned int lhsvar = find (lhs.var);
2440       unsigned int rhsvar = find (rhs.var);
2441       unsigned int lhsnode, rhsnode;
2442       unsigned int lhslabel, rhslabel;
2443 
2444       lhsnode = si->node_mapping[lhsvar];
2445       rhsnode = si->node_mapping[rhsvar];
2446       lhslabel = graph->pointer_label[lhsnode];
2447       rhslabel = graph->pointer_label[rhsnode];
2448 
2449       /* See if it is really a non-pointer variable, and if so, ignore
2450 	 the constraint.  */
2451       if (lhslabel == 0)
2452 	{
2453 	  if (dump_file && (dump_flags & TDF_DETAILS))
2454 	    {
2455 
2456 	      fprintf (dump_file, "%s is a non-pointer variable,"
2457 		       "ignoring constraint:",
2458 		       get_varinfo (lhs.var)->name);
2459 	      dump_constraint (dump_file, c);
2460 	      fprintf (dump_file, "\n");
2461 	    }
2462 	  constraints[i] = NULL;
2463 	  continue;
2464 	}
2465 
2466       if (rhslabel == 0)
2467 	{
2468 	  if (dump_file && (dump_flags & TDF_DETAILS))
2469 	    {
2470 
2471 	      fprintf (dump_file, "%s is a non-pointer variable,"
2472 		       "ignoring constraint:",
2473 		       get_varinfo (rhs.var)->name);
2474 	      dump_constraint (dump_file, c);
2475 	      fprintf (dump_file, "\n");
2476 	    }
2477 	  constraints[i] = NULL;
2478 	  continue;
2479 	}
2480 
2481       lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2482       rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2483       c->lhs.var = lhsvar;
2484       c->rhs.var = rhsvar;
2485 
2486     }
2487 }
2488 
2489 /* Eliminate indirect cycles involving NODE.  Return true if NODE was
2490    part of an SCC, false otherwise.  */
2491 
2492 static bool
2493 eliminate_indirect_cycles (unsigned int node)
2494 {
2495   if (graph->indirect_cycles[node] != -1
2496       && !bitmap_empty_p (get_varinfo (node)->solution))
2497     {
2498       unsigned int i;
2499       vec<unsigned> queue = vNULL;
2500       int queuepos;
2501       unsigned int to = find (graph->indirect_cycles[node]);
2502       bitmap_iterator bi;
2503 
2504       /* We can't touch the solution set and call unify_nodes
2505 	 at the same time, because unify_nodes is going to do
2506 	 bitmap unions into it. */
2507 
2508       /* First pass: union-find unite only, collecting the nodes that
2509 	 actually changed representative.  */
2508       EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2509 	{
2510 	  if (find (i) == i && i != to)
2511 	    {
2512 	      if (unite (to, i))
2513 		queue.safe_push (i);
2514 	    }
2515 	}
2516 
2517       /* Second pass: merge the solutions, now that iteration over the
2518 	 solution bitmap is finished.  */
2517       for (queuepos = 0;
2518 	   queue.iterate (queuepos, &i);
2519 	   queuepos++)
2520 	{
2521 	  unify_nodes (graph, to, i, true);
2522 	}
2523       queue.release ();
2524       return true;
2525     }
2526   return false;
2527 }
2528 
2529 /* Solve the constraint graph GRAPH using our worklist solver.
2530    This is based on the PW* family of solvers from the "Efficient Field
2531    Sensitive Pointer Analysis for C" paper.
2532    It works by iterating over all the graph nodes, processing the complex
2533    constraints and propagating the copy constraints, until everything stops
2534    changed.  This corresponds to steps 6-8 in the solving list given above.  */
2535 
2536 static void
2537 solve_graph (constraint_graph_t graph)
2538 {
2539   unsigned int size = graph->size;
2540   unsigned int i;
2541   bitmap pts;
2542 
2543   changed = BITMAP_ALLOC (NULL);
2544 
2545   /* Mark all initial non-collapsed nodes as changed.  Only
2546      representatives with a non-empty solution and either outgoing
2547      edges or complex constraints can affect anything.  */
2546   for (i = 0; i < size; i++)
2547     {
2548       varinfo_t ivi = get_varinfo (i);
2549       if (find (i) == i && !bitmap_empty_p (ivi->solution)
2550 	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2551 	      || graph->complex[i].length () > 0))
2552 	bitmap_set_bit (changed, i);
2553     }
2554 
2555   /* Allocate a bitmap to be used to store the changed bits.  */
2556   pts = BITMAP_ALLOC (&pta_obstack);
2557 
2558   /* Iterate to a fixed point: each round processes every changed node
2559      in topological order.  */
2558   while (!bitmap_empty_p (changed))
2559     {
2560       unsigned int i;
2561       struct topo_info *ti = init_topo_info ();
2562       stats.iterations++;
2563 
2564       bitmap_obstack_initialize (&iteration_obstack);
2565 
2566       compute_topo_order (graph, ti);
2567 
2568       while (ti->topo_order.length () != 0)
2569 	{
2570 
2571 	  i = ti->topo_order.pop ();
2572 
2573 	  /* If this variable is not a representative, skip it.  */
2574 	  if (find (i) != i)
2575 	    continue;
2576 
2577 	  /* In certain indirect cycle cases, we may merge this
2578 	     variable to another.  */
2579 	  if (eliminate_indirect_cycles (i) && find (i) != i)
2580 	    continue;
2581 
2582 	  /* If the node has changed, we need to process the
2583 	     complex constraints and outgoing edges again.  */
2584 	  if (bitmap_clear_bit (changed, i))
2585 	    {
2586 	      unsigned int j;
2587 	      constraint_t c;
2588 	      bitmap solution;
2589 	      vec<constraint_t> complex = graph->complex[i];
2590 	      varinfo_t vi = get_varinfo (i);
2591 	      bool solution_empty;
2592 
2593 	      /* Compute the changed set of solution bits.  Only the
2594 		 delta since the last visit needs to be propagated.  */
2593 	      if (vi->oldsolution)
2594 		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2595 	      else
2596 		bitmap_copy (pts, vi->solution);
2597 
2598 	      if (bitmap_empty_p (pts))
2599 		continue;
2600 
2601 	      /* Remember everything that has now been propagated.  */
2601 	      if (vi->oldsolution)
2602 		bitmap_ior_into (vi->oldsolution, pts);
2603 	      else
2604 		{
2605 		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2606 		  bitmap_copy (vi->oldsolution, pts);
2607 		}
2608 
2609 	      solution = vi->solution;
2610 	      solution_empty = bitmap_empty_p (solution);
2611 
2612 	      /* Process the complex constraints */
2613 	      FOR_EACH_VEC_ELT (complex, j, c)
2614 		{
2615 		  /* XXX: This is going to unsort the constraints in
2616 		     some cases, which will occasionally add duplicate
2617 		     constraints during unification.  This does not
2618 		     affect correctness.  */
2619 		  c->lhs.var = find (c->lhs.var);
2620 		  c->rhs.var = find (c->rhs.var);
2621 
2622 		  /* The only complex constraint that can change our
2623 		     solution to non-empty, given an empty solution,
2624 		     is a constraint where the lhs side is receiving
2625 		     some set from elsewhere.  */
2626 		  if (!solution_empty || c->lhs.type != DEREF)
2627 		    do_complex_constraint (graph, c, pts);
2628 		}
2629 
2630 	      solution_empty = bitmap_empty_p (solution);
2631 
2632 	      if (!solution_empty)
2633 		{
2634 		  bitmap_iterator bi;
2635 		  unsigned eff_escaped_id = find (escaped_id);
2636 
2637 		  /* Propagate solution to all successors.  */
2638 		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2639 						0, j, bi)
2640 		    {
2641 		      bitmap tmp;
2642 		      bool flag;
2643 
2644 		      unsigned int to = find (j);
2645 		      tmp = get_varinfo (to)->solution;
2646 		      flag = false;
2647 
2648 		      /* Don't try to propagate to ourselves.  */
2649 		      if (to == i)
2650 			continue;
2651 
2652 		      /* If we propagate from ESCAPED use ESCAPED as
2653 		         placeholder.  */
2654 		      if (i == eff_escaped_id)
2655 			flag = bitmap_set_bit (tmp, escaped_id);
2656 		      else
2657 			flag = set_union_with_increment (tmp, pts, 0);
2658 
2659 		      /* Requeue the successor if its solution grew.  */
2659 		      if (flag)
2660 			{
2661 			  get_varinfo (to)->solution = tmp;
2662 			  bitmap_set_bit (changed, to);
2663 			}
2664 		    }
2665 		}
2666 	    }
2667 	}
2668       free_topo_info (ti);
2669       bitmap_obstack_release (&iteration_obstack);
2670     }
2671 
2672   BITMAP_FREE (pts);
2673   BITMAP_FREE (changed);
2674   bitmap_obstack_release (&oldpta_obstack);
2675 }
2677 
2678 /* Map from trees to variable infos.  */
2679 static struct pointer_map_t *vi_for_tree;
2680 
2681 
2682 /* Insert ID as the variable id for tree T in the vi_for_tree map.  */
2683 
2684 static void
2685 insert_vi_for_tree (tree t, varinfo_t vi)
2686 {
2687   void **slot = pointer_map_insert (vi_for_tree, t);
2688 
2689   /* A valid varinfo must be supplied and T must not be mapped yet.  */
2690   gcc_assert (vi != NULL);
2691   gcc_assert (*slot == NULL);
2692 
2693   *slot = vi;
2694 }
2692 
2693 /* Find the variable info for tree T in VI_FOR_TREE.  If T does not
2694    exist in the map, return NULL, otherwise, return the varinfo we found.  */
2695 
2696 static varinfo_t
2697 lookup_vi_for_tree (tree t)
2698 {
2699   void **slot = pointer_map_contains (vi_for_tree, t);
2700 
2701   /* No entry for T means no varinfo has been created for it.  */
2702   return slot ? (varinfo_t) *slot : NULL;
2703 }
2705 
2706 /* Return a printable name for DECL  */
2707 
2708 static const char *
2709 alias_get_name (tree decl)
2710 {
2711   const char *res = NULL;
2712   char *temp;
2713   int num_printed = 0;
2714 
2715   /* Names are only needed for dumps; skip the work otherwise.  */
2715   if (!dump_file)
2716     return "NULL";
2717 
2718   if (TREE_CODE (decl) == SSA_NAME)
2719     {
2720       /* SSA names are printed as "base_version" or "_version".  */
2720       res = get_name (decl);
2721       if (res)
2722 	num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2723       else
2724 	num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2725       /* Move the malloc'd string to GC storage; on asprintf failure
2726 	 RES keeps the plain base name (or NULL).  */
2725       if (num_printed > 0)
2726 	{
2727 	  res = ggc_strdup (temp);
2728 	  free (temp);
2729 	}
2730     }
2731   else if (DECL_P (decl))
2732     {
2733       if (DECL_ASSEMBLER_NAME_SET_P (decl))
2734 	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2735       else
2736 	{
2737 	  res = get_name (decl);
2738 	  if (!res)
2739 	    {
2740 	      /* Anonymous decls get a "D.<uid>" style name.  */
2740 	      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2741 	      if (num_printed > 0)
2742 		{
2743 		  res = ggc_strdup (temp);
2744 		  free (temp);
2745 		}
2746 	    }
2747 	}
2748     }
2749   if (res != NULL)
2750     return res;
2751 
2752   return "NULL";
2753 }
2754 
2755 /* Find the variable id for tree T in the map.
2756    If T doesn't exist in the map, create an entry for it and return it.  */
2757 
2758 static varinfo_t
2759 get_vi_for_tree (tree t)
2760 {
2761   void **slot = pointer_map_contains (vi_for_tree, t);
2762 
2763   /* Already mapped: return the recorded varinfo.  */
2764   if (slot != NULL)
2765     return (varinfo_t) *slot;
2766 
2767   /* Otherwise build a fresh varinfo (which also records it).  */
2768   return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2769 }
2767 
2768 /* Get a scalar constraint expression for a new temporary variable.  */
2769 
2770 static struct constraint_expr
2771 new_scalar_tmp_constraint_exp (const char *name)
2772 {
2773   struct constraint_expr ret;
2774   varinfo_t tmpvi = new_var_info (NULL_TREE, name);
2775 
2776   /* The temporary covers its whole (unknown-size) variable.  */
2777   tmpvi->offset = 0;
2778   tmpvi->size = -1;
2779   tmpvi->fullsize = -1;
2780   tmpvi->is_full_var = 1;
2781 
2782   /* Refer to it as a plain scalar at offset zero.  */
2783   ret.var = tmpvi->id;
2784   ret.type = SCALAR;
2785   ret.offset = 0;
2786 
2787   return ret;
2788 }
2788 
2789 /* Get a constraint expression vector from an SSA_VAR_P node.
2790    If address_p is true, the result will be taken its address of.  */
2791 
2792 static void
2793 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2794 {
2795   struct constraint_expr cexpr;
2796   varinfo_t vi;
2797 
2798   /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
2799   gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2800 
2801   /* For parameters, get at the points-to set for the actual parm
2802      decl.  */
2803   if (TREE_CODE (t) == SSA_NAME
2804       && SSA_NAME_IS_DEFAULT_DEF (t)
2805       && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2806 	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2807     {
2808       get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2809       return;
2810     }
2811 
2812   /* For global variables resort to the alias target.  */
2813   if (TREE_CODE (t) == VAR_DECL
2814       && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2815     {
2816       struct varpool_node *node = varpool_get_node (t);
2817       if (node && node->alias)
2818 	{
2819 	  node = varpool_variable_node (node, NULL);
2820 	  t = node->symbol.decl;
2821 	}
2822     }
2823 
2824   vi = get_vi_for_tree (t);
2825   cexpr.var = vi->id;
2826   cexpr.type = SCALAR;
2827   cexpr.offset = 0;
2828   /* If we determine the result is "anything", and we know this is readonly,
2829      say it points to readonly memory instead.  NOTE: the assignments
2830      below are dead -- gcc_unreachable fires first; kept as historical
2831      documentation of the intended fallback.  */
2830   if (cexpr.var == anything_id && TREE_READONLY (t))
2831     {
2832       gcc_unreachable ();
2833       cexpr.type = ADDRESSOF;
2834       cexpr.var = readonly_id;
2835     }
2836 
2837   /* If we are not taking the address of the constraint expr, add all
2838      sub-fields of the variable as well.  */
2839   if (!address_p
2840       && !vi->is_full_var)
2841     {
2842       /* Walk the field chain starting at VI and emit one SCALAR
2843 	 expression per field.  */
2842       for (; vi; vi = vi->next)
2843 	{
2844 	  cexpr.var = vi->id;
2845 	  results->safe_push (cexpr);
2846 	}
2847       return;
2848     }
2849 
2850   results->safe_push (cexpr);
2851 }
2852 
2853 /* Process constraint T, performing various simplifications and then
2854    adding it to our list of overall constraints.  */
2855 
2856 static void
2857 process_constraint (constraint_t t)
2858 {
2859   struct constraint_expr rhs = t->rhs;
2860   struct constraint_expr lhs = t->lhs;
2861 
2862   gcc_assert (rhs.var < varmap.length ());
2863   gcc_assert (lhs.var < varmap.length ());
2864 
2865   /* If we didn't get any useful constraint from the lhs we get
2866      &ANYTHING as fallback from get_constraint_for.  Deal with
2867      it here by turning it into *ANYTHING.  */
2868   if (lhs.type == ADDRESSOF
2869       && lhs.var == anything_id)
2870     lhs.type = DEREF;
2871 
2872   /* ADDRESSOF on the lhs is invalid.  */
2873   gcc_assert (lhs.type != ADDRESSOF);
2874 
2875   /* We shouldn't add constraints from things that cannot have pointers.
2876      It's not completely trivial to avoid in the callers, so do it here.  */
2877   if (rhs.type != ADDRESSOF
2878       && !get_varinfo (rhs.var)->may_have_pointers)
2879     return;
2880 
2881   /* Likewise adding to the solution of a non-pointer var isn't useful.  */
2882   if (!get_varinfo (lhs.var)->may_have_pointers)
2883     return;
2884 
2885   /* This can happen in our IR with things like n->a = *p.  Constraints
2886      with DEREF on both sides are split through a fresh temporary so
2887      each recorded constraint has at most one indirection.  */
2886   if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2887     {
2888       /* Split into tmp = *rhs, *lhs = tmp */
2889       struct constraint_expr tmplhs;
2890       tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2891       process_constraint (new_constraint (tmplhs, rhs));
2892       process_constraint (new_constraint (lhs, tmplhs));
2893     }
2894   else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2895     {
2896       /* Split into tmp = &rhs, *lhs = tmp */
2897       struct constraint_expr tmplhs;
2898       tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2899       process_constraint (new_constraint (tmplhs, rhs));
2900       process_constraint (new_constraint (lhs, tmplhs));
2901     }
2902   else
2903     {
2904       /* Simple enough to record directly.  */
2904       gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2905       constraints.safe_push (t);
2906     }
2907 }
2908 
2909 
2910 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2911    structure.  */
2912 
2913 static HOST_WIDE_INT
2914 bitpos_of_field (const tree fdecl)
2915 {
2916   tree byte_off = DECL_FIELD_OFFSET (fdecl);
2917   tree bit_off = DECL_FIELD_BIT_OFFSET (fdecl);
2918 
2919   /* Give up when either offset does not fit a signed HWI.  */
2920   if (!host_integerp (byte_off, 0) || !host_integerp (bit_off, 0))
2921     return -1;
2922 
2923   return (TREE_INT_CST_LOW (byte_off) * BITS_PER_UNIT
2924 	  + TREE_INT_CST_LOW (bit_off));
2925 }
2923 
2924 
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  OFFSET is a byte
   offset tree (as in a MEM_REF or POINTER_PLUS_EXPR operand), or
   NULL_TREE if unknown.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;	/* Offset in bits, or UNKNOWN_OFFSET.  */

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      double_int soffset = tree_to_double_int (offset)
			   .sext (TYPE_PRECISION (TREE_TYPE (offset)));
      if (!soffset.fits_shwi ())
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.low;
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  /* Start from the constraints for the pointer itself; a zero offset
     then changes nothing.  */
  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	c.offset = 0;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the sub-field chain from the start of the whole
	     variable, not just from CURR.  */
	  varinfo_t temp = lookup_vi_for_tree (curr->decl);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = temp->next;
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  /* Unsigned wrap-around is intended here; underflow is
	     detected and clamped just below.  */
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = temp->next;
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	      temp = temp->next;
	    }
	}
      else
	/* For non-ADDRESSOF constraints defer the offset handling to
	   constraint solving time.  */
	c.offset = rhsoffset;

      (*results)[j] = c;
    }
}
3038 
3039 
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If ADDRESS_P is true the result will have its address taken.
   If LHS_P is true then the constraint expression is assumed to be used
   as the lhs.  */

static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      /* A reference based on a literal zero base: treat it as the
	 integer constant variable.  */
      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  /* Handle type-punning through unions.  If we are extracting a pointer
     from a union via a possibly type-punning access that pointer
     points to anything, similar to a conversion of an integer to
     a pointer.  */
  if (!lhs_p)
    {
      tree u;
      for (u = t;
	   TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
	   u = TREE_OPERAND (u, 0))
	if (TREE_CODE (u) == COMPONENT_REF
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
	  {
	    struct constraint_expr temp;

	    temp.offset = 0;
	    temp.var = anything_id;
	    temp.type = ADDRESSOF;
	    results->safe_push (temp);
	    return;
	  }
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* Pretend to take the address of the base, we'll take care of
     adding the required subset of sub-fields below.  */
  get_constraint_for_1 (t, results, true, lhs_p);
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint. When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  /* Push one constraint per sub-field overlapping the access;
	     for an address-taken result the first one suffices.  */
	  for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
	    {
	      if (ranges_overlap_p (curr->offset, curr->size,
				    bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != NULL)
		curr = curr->next;
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there. The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      if (bitpos == -1
	  || bitsize != bitmaxsize
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on a
         VIEW_CONVERT_EXPR <>(&foobar).  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3197 
3198 
3199 /* Dereference the constraint expression CONS, and return the result.
3200    DEREF (ADDRESSOF) = SCALAR
3201    DEREF (SCALAR) = DEREF
3202    DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3203    This is needed so that we can handle dereferencing DEREF constraints.  */
3204 
3205 static void
3206 do_deref (vec<ce_s> *constraints)
3207 {
3208   struct constraint_expr *c;
3209   unsigned int i = 0;
3210 
3211   FOR_EACH_VEC_ELT (*constraints, i, c)
3212     {
3213       if (c->type == SCALAR)
3214 	c->type = DEREF;
3215       else if (c->type == ADDRESSOF)
3216 	c->type = SCALAR;
3217       else if (c->type == DEREF)
3218 	{
3219 	  struct constraint_expr tmplhs;
3220 	  tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3221 	  process_constraint (new_constraint (tmplhs, *c));
3222 	  c->var = tmplhs.var;
3223 	}
3224       else
3225 	gcc_unreachable ();
3226     }
3227 }
3228 
3229 /* Given a tree T, return the constraint expression for taking the
3230    address of it.  */
3231 
3232 static void
3233 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3234 {
3235   struct constraint_expr *c;
3236   unsigned int i;
3237 
3238   get_constraint_for_1 (t, results, true, true);
3239 
3240   FOR_EACH_VEC_ELT (*results, i, c)
3241     {
3242       if (c->type == DEREF)
3243 	c->type = SCALAR;
3244       else
3245 	c->type = ADDRESSOF;
3246     }
3247 }
3248 
/* Given a tree T, return the constraint expression for it in *RESULTS.
   ADDRESS_P is true if the result will have its address taken;
   LHS_P is true if it is used in lhs position.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case. The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class; anything not handled falls
     through to the conservative &ANYTHING fallback at the end.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* For a scalar access into a multi-field variable also
		 add all following sub-fields covered by the access
		 size.  */
	      vi = get_varinfo (cs.var);
	      curr = vi->next;
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = curr->next)
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      vec<ce_s> tmp = vNULL;
	      /* Collect the union of the constraints of all elements.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	      /* We do not know whether the constructor was complete,
	         so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3433 
3434 /* Given a gimple tree T, return the constraint expression vector for it.  */
3435 
3436 static void
3437 get_constraint_for (tree t, vec<ce_s> *results)
3438 {
3439   gcc_assert (results->length () == 0);
3440 
3441   get_constraint_for_1 (t, results, false, true);
3442 }
3443 
3444 /* Given a gimple tree T, return the constraint expression vector for it
3445    to be used as the rhs of a constraint.  */
3446 
3447 static void
3448 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3449 {
3450   gcc_assert (results->length () == 0);
3451 
3452   get_constraint_for_1 (t, results, false, false);
3453 }
3454 
3455 
3456 /* Efficiently generates constraints from all entries in *RHSC to all
3457    entries in *LHSC.  */
3458 
3459 static void
3460 process_all_all_constraints (vec<ce_s> lhsc,
3461 			     vec<ce_s> rhsc)
3462 {
3463   struct constraint_expr *lhsp, *rhsp;
3464   unsigned i, j;
3465 
3466   if (lhsc.length () <= 1 || rhsc.length () <= 1)
3467     {
3468       FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3469 	FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3470 	  process_constraint (new_constraint (*lhsp, *rhsp));
3471     }
3472   else
3473     {
3474       struct constraint_expr tmp;
3475       tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3476       FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3477 	process_constraint (new_constraint (tmp, *rhsp));
3478       FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3479 	process_constraint (new_constraint (*lhsp, tmp));
3480     }
3481 }
3482 
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the gimple operands
   of the aggregate assignment.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is accessed indirectly, or the lhs is completely
     unknown, we cannot match fields and fall back to an all-to-all
     copy with unknown offsets.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      /* Walk the lhs and rhs field constraints in lockstep (J indexes
	 lhsc, K indexes rhsc), emitting a copy for each overlapping
	 field pair.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the rhs field when it ends before the current lhs
	     field does; otherwise advance the lhs field.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3554 
3555 /* Create constraints ID = { rhsc }.  */
3556 
3557 static void
3558 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3559 {
3560   struct constraint_expr *c;
3561   struct constraint_expr includes;
3562   unsigned int j;
3563 
3564   includes.var = id;
3565   includes.offset = 0;
3566   includes.type = SCALAR;
3567 
3568   FOR_EACH_VEC_ELT (rhsc, j, c)
3569     process_constraint (new_constraint (includes, *c));
3570 }
3571 
3572 /* Create a constraint ID = OP.  */
3573 
3574 static void
3575 make_constraint_to (unsigned id, tree op)
3576 {
3577   vec<ce_s> rhsc = vNULL;
3578   get_constraint_for_rhs (op, &rhsc);
3579   make_constraints_to (id, rhsc);
3580   rhsc.release ();
3581 }
3582 
3583 /* Create a constraint ID = &FROM.  */
3584 
3585 static void
3586 make_constraint_from (varinfo_t vi, int from)
3587 {
3588   struct constraint_expr lhs, rhs;
3589 
3590   lhs.var = vi->id;
3591   lhs.offset = 0;
3592   lhs.type = SCALAR;
3593 
3594   rhs.var = from;
3595   rhs.offset = 0;
3596   rhs.type = ADDRESSOF;
3597   process_constraint (new_constraint (lhs, rhs));
3598 }
3599 
3600 /* Create a constraint ID = FROM.  */
3601 
3602 static void
3603 make_copy_constraint (varinfo_t vi, int from)
3604 {
3605   struct constraint_expr lhs, rhs;
3606 
3607   lhs.var = vi->id;
3608   lhs.offset = 0;
3609   lhs.type = SCALAR;
3610 
3611   rhs.var = from;
3612   rhs.offset = 0;
3613   rhs.type = SCALAR;
3614   process_constraint (new_constraint (lhs, rhs));
3615 }
3616 
3617 /* Make constraints necessary to make OP escape.  */
3618 
3619 static void
3620 make_escape_constraint (tree op)
3621 {
3622   make_constraint_to (escaped_id, op);
3623 }
3624 
3625 /* Add constraints to that the solution of VI is transitively closed.  */
3626 
3627 static void
3628 make_transitive_closure_constraints (varinfo_t vi)
3629 {
3630   struct constraint_expr lhs, rhs;
3631 
3632   /* VAR = *VAR;  */
3633   lhs.type = SCALAR;
3634   lhs.var = vi->id;
3635   lhs.offset = 0;
3636   rhs.type = DEREF;
3637   rhs.var = vi->id;
3638   rhs.offset = 0;
3639   process_constraint (new_constraint (lhs, rhs));
3640 
3641   /* VAR = VAR + UNKNOWN;  */
3642   lhs.type = SCALAR;
3643   lhs.var = vi->id;
3644   lhs.offset = 0;
3645   rhs.type = SCALAR;
3646   rhs.var = vi->id;
3647   rhs.offset = UNKNOWN_OFFSET;
3648   process_constraint (new_constraint (lhs, rhs));
3649 }
3650 
3651 /* Temporary storage for fake var decls.  */
3652 struct obstack fake_var_decl_obstack;
3653 
3654 /* Build a fake VAR_DECL acting as referrer to a DECL_UID.  */
3655 
3656 static tree
3657 build_fake_var_decl (tree type)
3658 {
3659   tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3660   memset (decl, 0, sizeof (struct tree_var_decl));
3661   TREE_SET_CODE (decl, VAR_DECL);
3662   TREE_TYPE (decl) = type;
3663   DECL_UID (decl) = allocate_decl_uid ();
3664   SET_DECL_PT_UID (decl, -1);
3665   layout_decl (decl, 0);
3666   return decl;
3667 }
3668 
3669 /* Create a new artificial heap variable with NAME.
3670    Return the created variable.  */
3671 
3672 static varinfo_t
3673 make_heapvar (const char *name)
3674 {
3675   varinfo_t vi;
3676   tree heapvar;
3677 
3678   heapvar = build_fake_var_decl (ptr_type_node);
3679   DECL_EXTERNAL (heapvar) = 1;
3680 
3681   vi = new_var_info (heapvar, name);
3682   vi->is_artificial_var = true;
3683   vi->is_heap_var = true;
3684   vi->is_unknown_size_var = true;
3685   vi->offset = 0;
3686   vi->fullsize = ~0;
3687   vi->size = ~0;
3688   vi->is_full_var = true;
3689   insert_vi_for_tree (heapvar, vi);
3690 
3691   return vi;
3692 }
3693 
3694 /* Create a new artificial heap variable with NAME and make a
3695    constraint from it to LHS.  Set flags according to a tag used
3696    for tracking restrict pointers.  */
3697 
3698 static varinfo_t
3699 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3700 {
3701   varinfo_t vi = make_heapvar (name);
3702   vi->is_global_var = 1;
3703   vi->may_have_pointers = 1;
3704   make_constraint_from (lhs, vi->id);
3705   return vi;
3706 }
3707 
3708 /* Create a new artificial heap variable with NAME and make a
3709    constraint from it to LHS.  Set flags according to a tag used
3710    for tracking restrict pointers and make the artificial heap
3711    point to global memory.  */
3712 
3713 static varinfo_t
3714 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3715 {
3716   varinfo_t vi = make_constraint_from_restrict (lhs, name);
3717   make_copy_constraint (vi, nonlocal_id);
3718   return vi;
3719 }
3720 
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */
3726 
3727 enum { fi_clobbers = 1, fi_uses = 2,
3728        fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3729 
3730 /* Get a constraint for the requested part of a function designator FI
3731    when operating in IPA mode.  */
3732 
3733 static struct constraint_expr
3734 get_function_part_constraint (varinfo_t fi, unsigned part)
3735 {
3736   struct constraint_expr c;
3737 
3738   gcc_assert (in_ipa_mode);
3739 
3740   if (fi->id == anything_id)
3741     {
3742       /* ???  We probably should have a ANYFN special variable.  */
3743       c.var = anything_id;
3744       c.offset = 0;
3745       c.type = SCALAR;
3746     }
3747   else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3748     {
3749       varinfo_t ai = first_vi_for_offset (fi, part);
3750       if (ai)
3751 	c.var = ai->id;
3752       else
3753 	c.var = anything_id;
3754       c.offset = 0;
3755       c.type = SCALAR;
3756     }
3757   else
3758     {
3759       c.var = fi->id;
3760       c.offset = part;
3761       c.type = DEREF;
3762     }
3763 
3764   return c;
3765 }
3766 
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS of statement STMT.  Pushes onto *RESULTS the constraint
   expressions describing what the call may return.  */

static void
handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  /* Whether anything was added to the call's use solution, in which
     case returned pointers may point into it.  */
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
         any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	   && (flags & EAF_NOESCAPE))
	{
	  /* Read-only, non-escaping argument: only the call's uses
	     solution grows.  An indirect (non-EAF_DIRECT) use needs a
	     transitively closed temporary.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	      make_constraint_to (tem->id, arg);
	      make_transitive_closure_constraints (tem);
	      make_copy_constraint (uses, tem->id);
	    }
	  else
	    make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Non-escaping but possibly clobbered argument: it goes
	     into both the uses and the clobbers solutions.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	  make_constraint_to (tem->id, arg);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      vec<ce_s> tmpc = vNULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
      tmpc.release ();
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
3868 
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  RHSC contains the
   constraint expressions for the call's return value; FNDECL is the
   called function decl if known.
   NOTE(review): the incoming FLAGS value is never read -- it is
   overwritten with gimple_call_return_flags (stmt) below.  */

static void
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  vec<ce_s> lhsc = vNULL;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      /* Rebind the by-value RHSC parameter to a fresh local vector;
	 the caller's vector is unaffected.  */
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* malloc-like call: the result points to fresh heap memory.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP");
      /* We delay marking allocated storage global until we know if
         it escapes.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);

  lhsc.release ();
}
3935 
3936 /* For non-IPA mode, generate constraints necessary for a call of a
3937    const function that returns a pointer in the statement STMT.  */
3938 
3939 static void
3940 handle_const_call (gimple stmt, vec<ce_s> *results)
3941 {
3942   struct constraint_expr rhsc;
3943   unsigned int k;
3944 
3945   /* Treat nested const functions the same as pure functions as far
3946      as the static chain is concerned.  */
3947   if (gimple_call_chain (stmt))
3948     {
3949       varinfo_t uses = get_call_use_vi (stmt);
3950       make_transitive_closure_constraints (uses);
3951       make_constraint_to (uses->id, gimple_call_chain (stmt));
3952       rhsc.var = uses->id;
3953       rhsc.offset = 0;
3954       rhsc.type = SCALAR;
3955       results->safe_push (rhsc);
3956     }
3957 
3958   /* May return arguments.  */
3959   for (k = 0; k < gimple_call_num_args (stmt); ++k)
3960     {
3961       tree arg = gimple_call_arg (stmt, k);
3962       vec<ce_s> argc = vNULL;
3963       unsigned i;
3964       struct constraint_expr *argp;
3965       get_constraint_for_rhs (arg, &argc);
3966       FOR_EACH_VEC_ELT (argc, i, argp)
3967 	results->safe_push (*argp);
3968       argc.release ();
3969     }
3970 
3971   /* May return addresses of globals.  */
3972   rhsc.var = nonlocal_id;
3973   rhsc.offset = 0;
3974   rhsc.type = ADDRESSOF;
3975   results->safe_push (rhsc);
3976 }
3977 
3978 /* For non-IPA mode, generate constraints necessary for a call to a
3979    pure function in statement STMT.  */
3980 
3981 static void
3982 handle_pure_call (gimple stmt, vec<ce_s> *results)
3983 {
3984   struct constraint_expr rhsc;
3985   unsigned i;
3986   varinfo_t uses = NULL;
3987 
3988   /* Memory reached from pointer arguments is call-used.  */
3989   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3990     {
3991       tree arg = gimple_call_arg (stmt, i);
3992       if (!uses)
3993 	{
3994 	  uses = get_call_use_vi (stmt);
3995 	  make_transitive_closure_constraints (uses);
3996 	}
3997       make_constraint_to (uses->id, arg);
3998     }
3999 
4000   /* The static chain is used as well.  */
4001   if (gimple_call_chain (stmt))
4002     {
4003       if (!uses)
4004 	{
4005 	  uses = get_call_use_vi (stmt);
4006 	  make_transitive_closure_constraints (uses);
4007 	}
4008       make_constraint_to (uses->id, gimple_call_chain (stmt));
4009     }
4010 
4011   /* Pure functions may return call-used and nonlocal memory.  */
4012   if (uses)
4013     {
4014       rhsc.var = uses->id;
4015       rhsc.offset = 0;
4016       rhsc.type = SCALAR;
4017       results->safe_push (rhsc);
4018     }
4019   rhsc.var = nonlocal_id;
4020   rhsc.offset = 0;
4021   rhsc.type = SCALAR;
4022   results->safe_push (rhsc);
4023 }
4024 
4025 
4026 /* Return the varinfo for the callee of CALL.  */
4027 
4028 static varinfo_t
4029 get_fi_for_callee (gimple call)
4030 {
4031   tree decl, fn = gimple_call_fn (call);
4032 
4033   if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4034     fn = OBJ_TYPE_REF_EXPR (fn);
4035 
4036   /* If we can directly resolve the function being called, do so.
4037      Otherwise, it must be some sort of indirect expression that
4038      we should still be able to handle.  */
4039   decl = gimple_call_addr_fndecl (fn);
4040   if (decl)
4041     return get_vi_for_tree (decl);
4042 
4043   /* If the function is anything other than a SSA name pointer we have no
4044      clue and should be getting ANYFN (well, ANYTHING for now).  */
4045   if (!fn || TREE_CODE (fn) != SSA_NAME)
4046     return get_varinfo (anything_id);
4047 
4048   if (SSA_NAME_IS_DEFAULT_DEF (fn)
4049       && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4050 	  || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4051     fn = SSA_NAME_VAR (fn);
4052 
4053   return get_vi_for_tree (fn);
4054 }
4055 
/* Create constraints for the builtin call T.  Return true if the call
   was handled, otherwise false.  */

static bool
find_func_aliases_for_builtin_call (gimple t)
{
  tree fndecl = gimple_call_fndecl (t);
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ???  All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* bcopy takes (src, dest) in the opposite order of the rest.  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* mempcpy/stpcpy/stpncpy (and their _CHK variants) return
		 a pointer past or into the destination object, so the
		 result is modeled as DEST at an unknown offset; the
		 others return DEST itself.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  /* Model the copy itself: *DEST = *SRC at unknown offsets.  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  /* memset returns its destination argument.  */
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* memset with zero, when zero is not a valid pointer value,
	     effectively stores null pointers (NOTHING); any other fill
	     byte is arbitrary non-pointer data (INTEGER).  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	      process_constraint (new_constraint (*lhsp, ac));
	  lhsc.release ();
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* assume_aligned returns its first argument unchanged.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.release ();
	      rhsc.release ();
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
	if (gimple_call_lhs (t))
	  {
	    /* The result is a fresh heap object (handled like other
	       allocators) whose contents mirror what the source's
	       pointed-to memory points to.  */
	    handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
			     vNULL, fndecl);
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.release ();
	    rhsc.release ();
	    return true;
	  }
	break;
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The nested function's static chain receives the
		     frame pointer.  */
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		      process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.release ();

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);
		  rhsc.release ();
		  lhsc.release ();

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  /* The result points to the function the trampoline was set
	     up for (stored by INIT_TRAMPOLINE above).  */
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      rhsc.release ();
	      lhsc.release ();
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* A transactional store is modeled as *ADDR = SRC.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* A transactional load is modeled as DEST = *ADDR.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  lhsc.release ();
	  rhsc.release ();
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for (valist, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (cfun->decl);
	      /* ~0 addresses all parameters including the variadic
		 tail of the current function.  */
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  lhsc.release ();
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (cfun->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later excercise...  */
      default:
	/* Fallthru to general call handling.  */;
      }

  return false;
}
4379 
4380 /* Create constraints for the call T.  */
4381 
4382 static void
4383 find_func_aliases_for_call (gimple t)
4384 {
4385   tree fndecl = gimple_call_fndecl (t);
4386   vec<ce_s> lhsc = vNULL;
4387   vec<ce_s> rhsc = vNULL;
4388   varinfo_t fi;
4389 
4390   if (fndecl != NULL_TREE
4391       && DECL_BUILT_IN (fndecl)
4392       && find_func_aliases_for_builtin_call (t))
4393     return;
4394 
4395   fi = get_fi_for_callee (t);
4396   if (!in_ipa_mode
4397       || (fndecl && !fi->is_fn_info))
4398     {
4399       vec<ce_s> rhsc = vNULL;
4400       int flags = gimple_call_flags (t);
4401 
4402       /* Const functions can return their arguments and addresses
4403 	 of global memory but not of escaped memory.  */
4404       if (flags & (ECF_CONST|ECF_NOVOPS))
4405 	{
4406 	  if (gimple_call_lhs (t))
4407 	    handle_const_call (t, &rhsc);
4408 	}
4409       /* Pure functions can return addresses in and of memory
4410 	 reachable from their arguments, but they are not an escape
4411 	 point for reachable memory of their arguments.  */
4412       else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4413 	handle_pure_call (t, &rhsc);
4414       else
4415 	handle_rhs_call (t, &rhsc);
4416       if (gimple_call_lhs (t))
4417 	handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4418       rhsc.release ();
4419     }
4420   else
4421     {
4422       tree lhsop;
4423       unsigned j;
4424 
4425       /* Assign all the passed arguments to the appropriate incoming
4426 	 parameters of the function.  */
4427       for (j = 0; j < gimple_call_num_args (t); j++)
4428 	{
4429 	  struct constraint_expr lhs ;
4430 	  struct constraint_expr *rhsp;
4431 	  tree arg = gimple_call_arg (t, j);
4432 
4433 	  get_constraint_for_rhs (arg, &rhsc);
4434 	  lhs = get_function_part_constraint (fi, fi_parm_base + j);
4435 	  while (rhsc.length () != 0)
4436 	    {
4437 	      rhsp = &rhsc.last ();
4438 	      process_constraint (new_constraint (lhs, *rhsp));
4439 	      rhsc.pop ();
4440 	    }
4441 	}
4442 
4443       /* If we are returning a value, assign it to the result.  */
4444       lhsop = gimple_call_lhs (t);
4445       if (lhsop)
4446 	{
4447 	  struct constraint_expr rhs;
4448 	  struct constraint_expr *lhsp;
4449 
4450 	  get_constraint_for (lhsop, &lhsc);
4451 	  rhs = get_function_part_constraint (fi, fi_result);
4452 	  if (fndecl
4453 	      && DECL_RESULT (fndecl)
4454 	      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4455 	    {
4456 	      vec<ce_s> tem = vNULL;
4457 	      tem.safe_push (rhs);
4458 	      do_deref (&tem);
4459 	      rhs = tem[0];
4460 	      tem.release ();
4461 	    }
4462 	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4463 	    process_constraint (new_constraint (*lhsp, rhs));
4464 	}
4465 
4466       /* If we pass the result decl by reference, honor that.  */
4467       if (lhsop
4468 	  && fndecl
4469 	  && DECL_RESULT (fndecl)
4470 	  && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4471 	{
4472 	  struct constraint_expr lhs;
4473 	  struct constraint_expr *rhsp;
4474 
4475 	  get_constraint_for_address_of (lhsop, &rhsc);
4476 	  lhs = get_function_part_constraint (fi, fi_result);
4477 	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4478 	    process_constraint (new_constraint (lhs, *rhsp));
4479 	  rhsc.release ();
4480 	}
4481 
4482       /* If we use a static chain, pass it along.  */
4483       if (gimple_call_chain (t))
4484 	{
4485 	  struct constraint_expr lhs;
4486 	  struct constraint_expr *rhsp;
4487 
4488 	  get_constraint_for (gimple_call_chain (t), &rhsc);
4489 	  lhs = get_function_part_constraint (fi, fi_static_chain);
4490 	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4491 	    process_constraint (new_constraint (lhs, *rhsp));
4492 	}
4493     }
4494 }
4495 
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  */
4500 
4501 static void
4502 find_func_aliases (gimple origt)
4503 {
4504   gimple t = origt;
4505   vec<ce_s> lhsc = vNULL;
4506   vec<ce_s> rhsc = vNULL;
4507   struct constraint_expr *c;
4508   varinfo_t fi;
4509 
4510   /* Now build constraints expressions.  */
4511   if (gimple_code (t) == GIMPLE_PHI)
4512     {
4513       size_t i;
4514       unsigned int j;
4515 
4516       /* For a phi node, assign all the arguments to
4517 	 the result.  */
4518       get_constraint_for (gimple_phi_result (t), &lhsc);
4519       for (i = 0; i < gimple_phi_num_args (t); i++)
4520 	{
4521 	  tree strippedrhs = PHI_ARG_DEF (t, i);
4522 
4523 	  STRIP_NOPS (strippedrhs);
4524 	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4525 
4526 	  FOR_EACH_VEC_ELT (lhsc, j, c)
4527 	    {
4528 	      struct constraint_expr *c2;
4529 	      while (rhsc.length () > 0)
4530 		{
4531 		  c2 = &rhsc.last ();
4532 		  process_constraint (new_constraint (*c, *c2));
4533 		  rhsc.pop ();
4534 		}
4535 	    }
4536 	}
4537     }
4538   /* In IPA mode, we need to generate constraints to pass call
4539      arguments through their calls.   There are two cases,
4540      either a GIMPLE_CALL returning a value, or just a plain
4541      GIMPLE_CALL when we are not.
4542 
4543      In non-ipa mode, we need to generate constraints for each
4544      pointer passed by address.  */
4545   else if (is_gimple_call (t))
4546     find_func_aliases_for_call (t);
4547 
4548   /* Otherwise, just a regular assignment statement.  Only care about
4549      operations with pointer result, others are dealt with as escape
4550      points if they have pointer operands.  */
4551   else if (is_gimple_assign (t))
4552     {
4553       /* Otherwise, just a regular assignment statement.  */
4554       tree lhsop = gimple_assign_lhs (t);
4555       tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4556 
4557       if (rhsop && TREE_CLOBBER_P (rhsop))
4558 	/* Ignore clobbers, they don't actually store anything into
4559 	   the LHS.  */
4560 	;
4561       else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4562 	do_structure_copy (lhsop, rhsop);
4563       else
4564 	{
4565 	  enum tree_code code = gimple_assign_rhs_code (t);
4566 
4567 	  get_constraint_for (lhsop, &lhsc);
4568 
4569 	  if (code == POINTER_PLUS_EXPR)
4570 	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4571 					   gimple_assign_rhs2 (t), &rhsc);
4572 	  else if (code == BIT_AND_EXPR
4573 		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4574 	    {
4575 	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4576 		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
4577 	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4578 					     NULL_TREE, &rhsc);
4579 	    }
4580 	  else if ((CONVERT_EXPR_CODE_P (code)
4581 		    && !(POINTER_TYPE_P (gimple_expr_type (t))
4582 			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4583 		   || gimple_assign_single_p (t))
4584 	    get_constraint_for_rhs (rhsop, &rhsc);
4585 	  else if (code == COND_EXPR)
4586 	    {
4587 	      /* The result is a merge of both COND_EXPR arms.  */
4588 	      vec<ce_s> tmp = vNULL;
4589 	      struct constraint_expr *rhsp;
4590 	      unsigned i;
4591 	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4592 	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4593 	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
4594 		rhsc.safe_push (*rhsp);
4595 	      tmp.release ();
4596 	    }
4597 	  else if (truth_value_p (code))
4598 	    /* Truth value results are not pointer (parts).  Or at least
4599 	       very very unreasonable obfuscation of a part.  */
4600 	    ;
4601 	  else
4602 	    {
4603 	      /* All other operations are merges.  */
4604 	      vec<ce_s> tmp = vNULL;
4605 	      struct constraint_expr *rhsp;
4606 	      unsigned i, j;
4607 	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4608 	      for (i = 2; i < gimple_num_ops (t); ++i)
4609 		{
4610 		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
4611 		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
4612 		    rhsc.safe_push (*rhsp);
4613 		  tmp.truncate (0);
4614 		}
4615 	      tmp.release ();
4616 	    }
4617 	  process_all_all_constraints (lhsc, rhsc);
4618 	}
4619       /* If there is a store to a global variable the rhs escapes.  */
4620       if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4621 	  && DECL_P (lhsop)
4622 	  && is_global_var (lhsop)
4623 	  && (!in_ipa_mode
4624 	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4625 	make_escape_constraint (rhsop);
4626     }
4627   /* Handle escapes through return.  */
4628   else if (gimple_code (t) == GIMPLE_RETURN
4629 	   && gimple_return_retval (t) != NULL_TREE)
4630     {
4631       fi = NULL;
4632       if (!in_ipa_mode
4633 	  || !(fi = get_vi_for_tree (cfun->decl)))
4634 	make_escape_constraint (gimple_return_retval (t));
4635       else if (in_ipa_mode
4636 	       && fi != NULL)
4637 	{
4638 	  struct constraint_expr lhs ;
4639 	  struct constraint_expr *rhsp;
4640 	  unsigned i;
4641 
4642 	  lhs = get_function_part_constraint (fi, fi_result);
4643 	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4644 	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4645 	    process_constraint (new_constraint (lhs, *rhsp));
4646 	}
4647     }
4648   /* Handle asms conservatively by adding escape constraints to everything.  */
4649   else if (gimple_code (t) == GIMPLE_ASM)
4650     {
4651       unsigned i, noutputs;
4652       const char **oconstraints;
4653       const char *constraint;
4654       bool allows_mem, allows_reg, is_inout;
4655 
4656       noutputs = gimple_asm_noutputs (t);
4657       oconstraints = XALLOCAVEC (const char *, noutputs);
4658 
4659       for (i = 0; i < noutputs; ++i)
4660 	{
4661 	  tree link = gimple_asm_output_op (t, i);
4662 	  tree op = TREE_VALUE (link);
4663 
4664 	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4665 	  oconstraints[i] = constraint;
4666 	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4667 				   &allows_reg, &is_inout);
4668 
4669 	  /* A memory constraint makes the address of the operand escape.  */
4670 	  if (!allows_reg && allows_mem)
4671 	    make_escape_constraint (build_fold_addr_expr (op));
4672 
4673 	  /* The asm may read global memory, so outputs may point to
4674 	     any global memory.  */
4675 	  if (op)
4676 	    {
4677 	      vec<ce_s> lhsc = vNULL;
4678 	      struct constraint_expr rhsc, *lhsp;
4679 	      unsigned j;
4680 	      get_constraint_for (op, &lhsc);
4681 	      rhsc.var = nonlocal_id;
4682 	      rhsc.offset = 0;
4683 	      rhsc.type = SCALAR;
4684 	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4685 		process_constraint (new_constraint (*lhsp, rhsc));
4686 	      lhsc.release ();
4687 	    }
4688 	}
4689       for (i = 0; i < gimple_asm_ninputs (t); ++i)
4690 	{
4691 	  tree link = gimple_asm_input_op (t, i);
4692 	  tree op = TREE_VALUE (link);
4693 
4694 	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4695 
4696 	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4697 				  &allows_mem, &allows_reg);
4698 
4699 	  /* A memory constraint makes the address of the operand escape.  */
4700 	  if (!allows_reg && allows_mem)
4701 	    make_escape_constraint (build_fold_addr_expr (op));
4702 	  /* Strictly we'd only need the constraint to ESCAPED if
4703 	     the asm clobbers memory, otherwise using something
4704 	     along the lines of per-call clobbers/uses would be enough.  */
4705 	  else if (op)
4706 	    make_escape_constraint (op);
4707 	}
4708     }
4709 
4710   rhsc.release ();
4711   lhsc.release ();
4712 }
4713 
4714 
4715 /* Create a constraint adding to the clobber set of FI the memory
4716    pointed to by PTR.  */
4717 
4718 static void
4719 process_ipa_clobber (varinfo_t fi, tree ptr)
4720 {
4721   vec<ce_s> ptrc = vNULL;
4722   struct constraint_expr *c, lhs;
4723   unsigned i;
4724   get_constraint_for_rhs (ptr, &ptrc);
4725   lhs = get_function_part_constraint (fi, fi_clobbers);
4726   FOR_EACH_VEC_ELT (ptrc, i, c)
4727     process_constraint (new_constraint (lhs, *c));
4728   ptrc.release ();
4729 }
4730 
4731 /* Walk statement T setting up clobber and use constraints according to the
4732    references found in T.  This function is a main part of the
4733    IPA constraint builder.  */
4734 
4735 static void
4736 find_func_clobbers (gimple origt)
4737 {
4738   gimple t = origt;
4739   vec<ce_s> lhsc = vNULL;
4740   vec<ce_s> rhsc = vNULL;
4741   varinfo_t fi;
4742 
4743   /* Add constraints for clobbered/used in IPA mode.
4744      We are not interested in what automatic variables are clobbered
4745      or used as we only use the information in the caller to which
4746      they do not escape.  */
4747   gcc_assert (in_ipa_mode);
4748 
4749   /* If the stmt refers to memory in any way it better had a VUSE.  */
4750   if (gimple_vuse (t) == NULL_TREE)
4751     return;
4752 
4753   /* We'd better have function information for the current function.  */
4754   fi = lookup_vi_for_tree (cfun->decl);
4755   gcc_assert (fi != NULL);
4756 
4757   /* Account for stores in assignments and calls.  */
4758   if (gimple_vdef (t) != NULL_TREE
4759       && gimple_has_lhs (t))
4760     {
4761       tree lhs = gimple_get_lhs (t);
4762       tree tem = lhs;
4763       while (handled_component_p (tem))
4764 	tem = TREE_OPERAND (tem, 0);
4765       if ((DECL_P (tem)
4766 	   && !auto_var_in_fn_p (tem, cfun->decl))
4767 	  || INDIRECT_REF_P (tem)
4768 	  || (TREE_CODE (tem) == MEM_REF
4769 	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4770 		   && auto_var_in_fn_p
4771 		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4772 	{
4773 	  struct constraint_expr lhsc, *rhsp;
4774 	  unsigned i;
4775 	  lhsc = get_function_part_constraint (fi, fi_clobbers);
4776 	  get_constraint_for_address_of (lhs, &rhsc);
4777 	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4778 	    process_constraint (new_constraint (lhsc, *rhsp));
4779 	  rhsc.release ();
4780 	}
4781     }
4782 
4783   /* Account for uses in assigments and returns.  */
4784   if (gimple_assign_single_p (t)
4785       || (gimple_code (t) == GIMPLE_RETURN
4786 	  && gimple_return_retval (t) != NULL_TREE))
4787     {
4788       tree rhs = (gimple_assign_single_p (t)
4789 		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4790       tree tem = rhs;
4791       while (handled_component_p (tem))
4792 	tem = TREE_OPERAND (tem, 0);
4793       if ((DECL_P (tem)
4794 	   && !auto_var_in_fn_p (tem, cfun->decl))
4795 	  || INDIRECT_REF_P (tem)
4796 	  || (TREE_CODE (tem) == MEM_REF
4797 	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4798 		   && auto_var_in_fn_p
4799 		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4800 	{
4801 	  struct constraint_expr lhs, *rhsp;
4802 	  unsigned i;
4803 	  lhs = get_function_part_constraint (fi, fi_uses);
4804 	  get_constraint_for_address_of (rhs, &rhsc);
4805 	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4806 	    process_constraint (new_constraint (lhs, *rhsp));
4807 	  rhsc.release ();
4808 	}
4809     }
4810 
4811   if (is_gimple_call (t))
4812     {
4813       varinfo_t cfi = NULL;
4814       tree decl = gimple_call_fndecl (t);
4815       struct constraint_expr lhs, rhs;
4816       unsigned i, j;
4817 
4818       /* For builtins we do not have separate function info.  For those
4819 	 we do not generate escapes for we have to generate clobbers/uses.  */
4820       if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4821 	switch (DECL_FUNCTION_CODE (decl))
4822 	  {
4823 	  /* The following functions use and clobber memory pointed to
4824 	     by their arguments.  */
4825 	  case BUILT_IN_STRCPY:
4826 	  case BUILT_IN_STRNCPY:
4827 	  case BUILT_IN_BCOPY:
4828 	  case BUILT_IN_MEMCPY:
4829 	  case BUILT_IN_MEMMOVE:
4830 	  case BUILT_IN_MEMPCPY:
4831 	  case BUILT_IN_STPCPY:
4832 	  case BUILT_IN_STPNCPY:
4833 	  case BUILT_IN_STRCAT:
4834 	  case BUILT_IN_STRNCAT:
4835 	  case BUILT_IN_STRCPY_CHK:
4836 	  case BUILT_IN_STRNCPY_CHK:
4837 	  case BUILT_IN_MEMCPY_CHK:
4838 	  case BUILT_IN_MEMMOVE_CHK:
4839 	  case BUILT_IN_MEMPCPY_CHK:
4840 	  case BUILT_IN_STPCPY_CHK:
4841 	  case BUILT_IN_STPNCPY_CHK:
4842 	  case BUILT_IN_STRCAT_CHK:
4843 	  case BUILT_IN_STRNCAT_CHK:
4844 	    {
4845 	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4846 					       == BUILT_IN_BCOPY ? 1 : 0));
4847 	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4848 					      == BUILT_IN_BCOPY ? 0 : 1));
4849 	      unsigned i;
4850 	      struct constraint_expr *rhsp, *lhsp;
4851 	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4852 	      lhs = get_function_part_constraint (fi, fi_clobbers);
4853 	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4854 		process_constraint (new_constraint (lhs, *lhsp));
4855 	      lhsc.release ();
4856 	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4857 	      lhs = get_function_part_constraint (fi, fi_uses);
4858 	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4859 		process_constraint (new_constraint (lhs, *rhsp));
4860 	      rhsc.release ();
4861 	      return;
4862 	    }
4863 	  /* The following function clobbers memory pointed to by
4864 	     its argument.  */
4865 	  case BUILT_IN_MEMSET:
4866 	  case BUILT_IN_MEMSET_CHK:
4867 	    {
4868 	      tree dest = gimple_call_arg (t, 0);
4869 	      unsigned i;
4870 	      ce_s *lhsp;
4871 	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4872 	      lhs = get_function_part_constraint (fi, fi_clobbers);
4873 	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4874 		process_constraint (new_constraint (lhs, *lhsp));
4875 	      lhsc.release ();
4876 	      return;
4877 	    }
4878 	  /* The following functions clobber their second and third
4879 	     arguments.  */
4880 	  case BUILT_IN_SINCOS:
4881 	  case BUILT_IN_SINCOSF:
4882 	  case BUILT_IN_SINCOSL:
4883 	    {
4884 	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
4885 	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
4886 	      return;
4887 	    }
4888 	  /* The following functions clobber their second argument.  */
4889 	  case BUILT_IN_FREXP:
4890 	  case BUILT_IN_FREXPF:
4891 	  case BUILT_IN_FREXPL:
4892 	  case BUILT_IN_LGAMMA_R:
4893 	  case BUILT_IN_LGAMMAF_R:
4894 	  case BUILT_IN_LGAMMAL_R:
4895 	  case BUILT_IN_GAMMA_R:
4896 	  case BUILT_IN_GAMMAF_R:
4897 	  case BUILT_IN_GAMMAL_R:
4898 	  case BUILT_IN_MODF:
4899 	  case BUILT_IN_MODFF:
4900 	  case BUILT_IN_MODFL:
4901 	    {
4902 	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
4903 	      return;
4904 	    }
4905 	  /* The following functions clobber their third argument.  */
4906 	  case BUILT_IN_REMQUO:
4907 	  case BUILT_IN_REMQUOF:
4908 	  case BUILT_IN_REMQUOL:
4909 	    {
4910 	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
4911 	      return;
4912 	    }
4913 	  /* The following functions neither read nor clobber memory.  */
4914 	  case BUILT_IN_ASSUME_ALIGNED:
4915 	  case BUILT_IN_FREE:
4916 	    return;
4917 	  /* Trampolines are of no interest to us.  */
4918 	  case BUILT_IN_INIT_TRAMPOLINE:
4919 	  case BUILT_IN_ADJUST_TRAMPOLINE:
4920 	    return;
4921 	  case BUILT_IN_VA_START:
4922 	  case BUILT_IN_VA_END:
4923 	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
4927 	  default:
4928 	    /* Fallthru to general call handling.  */;
4929 	  }
4930 
4931       /* Parameters passed by value are used.  */
4932       lhs = get_function_part_constraint (fi, fi_uses);
4933       for (i = 0; i < gimple_call_num_args (t); i++)
4934 	{
4935 	  struct constraint_expr *rhsp;
4936 	  tree arg = gimple_call_arg (t, i);
4937 
4938 	  if (TREE_CODE (arg) == SSA_NAME
4939 	      || is_gimple_min_invariant (arg))
4940 	    continue;
4941 
4942 	  get_constraint_for_address_of (arg, &rhsc);
4943 	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4944 	    process_constraint (new_constraint (lhs, *rhsp));
4945 	  rhsc.release ();
4946 	}
4947 
4948       /* Build constraints for propagating clobbers/uses along the
4949 	 callgraph edges.  */
4950       cfi = get_fi_for_callee (t);
4951       if (cfi->id == anything_id)
4952 	{
4953 	  if (gimple_vdef (t))
4954 	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4955 				  anything_id);
4956 	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
4957 				anything_id);
4958 	  return;
4959 	}
4960 
4961       /* For callees without function info (that's external functions),
4962 	 ESCAPED is clobbered and used.  */
4963       if (gimple_call_fndecl (t)
4964 	  && !cfi->is_fn_info)
4965 	{
4966 	  varinfo_t vi;
4967 
4968 	  if (gimple_vdef (t))
4969 	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4970 				  escaped_id);
4971 	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4972 
4973 	  /* Also honor the call statement use/clobber info.  */
4974 	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
4975 	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4976 				  vi->id);
4977 	  if ((vi = lookup_call_use_vi (t)) != NULL)
4978 	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4979 				  vi->id);
4980 	  return;
4981 	}
4982 
4983       /* Otherwise the caller clobbers and uses what the callee does.
4984 	 ???  This should use a new complex constraint that filters
4985 	 local variables of the callee.  */
4986       if (gimple_vdef (t))
4987 	{
4988 	  lhs = get_function_part_constraint (fi, fi_clobbers);
4989 	  rhs = get_function_part_constraint (cfi, fi_clobbers);
4990 	  process_constraint (new_constraint (lhs, rhs));
4991 	}
4992       lhs = get_function_part_constraint (fi, fi_uses);
4993       rhs = get_function_part_constraint (cfi, fi_uses);
4994       process_constraint (new_constraint (lhs, rhs));
4995     }
4996   else if (gimple_code (t) == GIMPLE_ASM)
4997     {
4998       /* ???  Ick.  We can do better.  */
4999       if (gimple_vdef (t))
5000 	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5001 			      anything_id);
5002       make_constraint_from (first_vi_for_offset (fi, fi_uses),
5003 			    anything_id);
5004     }
5005 
5006   rhsc.release ();
5007 }
5008 
5009 
5010 /* Find the first varinfo in the same variable as START that overlaps with
5011    OFFSET.  Return NULL if we can't find one.  */
5012 
5013 static varinfo_t
5014 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5015 {
5016   /* If the offset is outside of the variable, bail out.  */
5017   if (offset >= start->fullsize)
5018     return NULL;
5019 
5020   /* If we cannot reach offset from start, lookup the first field
5021      and start from there.  */
5022   if (start->offset > offset)
5023     start = lookup_vi_for_tree (start->decl);
5024 
5025   while (start)
5026     {
5027       /* We may not find a variable in the field list with the actual
5028 	 offset when when we have glommed a structure to a variable.
5029 	 In that case, however, offset should still be within the size
5030 	 of the variable. */
5031       if (offset >= start->offset
5032 	  && (offset - start->offset) < start->size)
5033 	return start;
5034 
5035       start= start->next;
5036     }
5037 
5038   return NULL;
5039 }
5040 
5041 /* Find the first varinfo in the same variable as START that overlaps with
5042    OFFSET.  If there is no such varinfo the varinfo directly preceding
5043    OFFSET is returned.  */
5044 
5045 static varinfo_t
5046 first_or_preceding_vi_for_offset (varinfo_t start,
5047 				  unsigned HOST_WIDE_INT offset)
5048 {
5049   /* If we cannot reach offset from start, lookup the first field
5050      and start from there.  */
5051   if (start->offset > offset)
5052     start = lookup_vi_for_tree (start->decl);
5053 
5054   /* We may not find a variable in the field list with the actual
5055      offset when when we have glommed a structure to a variable.
5056      In that case, however, offset should still be within the size
5057      of the variable.
5058      If we got beyond the offset we look for return the field
5059      directly preceding offset which may be the last field.  */
5060   while (start->next
5061 	 && offset >= start->offset
5062 	 && !((offset - start->offset) < start->size))
5063     start = start->next;
5064 
5065   return start;
5066 }
5067 
5068 
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the size of this field is not known at compile time.  */
  unsigned has_unknown_size : 1;

  /* True if the field's type must contain pointers
     (see type_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict qualified pointer of known size.  */
  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
5091 
5092 
5093 /* qsort comparison function for two fieldoff's PA and PB */
5094 
5095 static int
5096 fieldoff_compare (const void *pa, const void *pb)
5097 {
5098   const fieldoff_s *foa = (const fieldoff_s *)pa;
5099   const fieldoff_s *fob = (const fieldoff_s *)pb;
5100   unsigned HOST_WIDE_INT foasize, fobsize;
5101 
5102   if (foa->offset < fob->offset)
5103     return -1;
5104   else if (foa->offset > fob->offset)
5105     return 1;
5106 
5107   foasize = foa->size;
5108   fobsize = fob->size;
5109   if (foasize < fobsize)
5110     return -1;
5111   else if (foasize > fobsize)
5112     return 1;
5113   return 0;
5114 }
5115 
/* Sort FIELDSTACK by ascending field offset, then size, using
   fieldoff_compare.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
5122 
5123 /* Return true if T is a type that can have subvars.  */
5124 
5125 static inline bool
5126 type_can_have_subvars (const_tree t)
5127 {
5128   /* Aggregates without overlapping fields can have subvars.  */
5129   return TREE_CODE (t) == RECORD_TYPE;
5130 }
5131 
5132 /* Return true if V is a tree that we can have subvars for.
5133    Normally, this is any aggregate type.  Also complex
5134    types which are not gimple registers can have subvars.  */
5135 
5136 static inline bool
5137 var_can_have_subvars (const_tree v)
5138 {
5139   /* Volatile variables should never have subvars.  */
5140   if (TREE_THIS_VOLATILE (v))
5141     return false;
5142 
5143   /* Non decls or memory tags can never have subvars.  */
5144   if (!DECL_P (v))
5145     return false;
5146 
5147   return type_can_have_subvars (TREE_TYPE (v));
5148 }
5149 
5150 /* Return true if T is a type that does contain pointers.  */
5151 
5152 static bool
5153 type_must_have_pointers (tree type)
5154 {
5155   if (POINTER_TYPE_P (type))
5156     return true;
5157 
5158   if (TREE_CODE (type) == ARRAY_TYPE)
5159     return type_must_have_pointers (TREE_TYPE (type));
5160 
5161   /* A function or method can have pointers as arguments, so track
5162      those separately.  */
5163   if (TREE_CODE (type) == FUNCTION_TYPE
5164       || TREE_CODE (type) == METHOD_TYPE)
5165     return true;
5166 
5167   return false;
5168 }
5169 
/* Return true if the type of field T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5175 
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only record types are decomposed into fields.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);

	/* Fields without subvars and union-typed fields are pushed as
	   single entries; record-typed fields are recursed into.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (TREE_TYPE (field), fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    /* PAIR is the last entry pushed so far, if any.  */
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.
	       The initializer creates an entry at offset 0 whose size
	       spans everything up to this field.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e = {0, offset + foff, false, false, false, false};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		/* Otherwise push a fresh entry for this field.  */
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5276 
5277 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5278    if it is a varargs function.  */
5279 
5280 static unsigned int
5281 count_num_arguments (tree decl, bool *is_varargs)
5282 {
5283   unsigned int num = 0;
5284   tree t;
5285 
5286   /* Capture named arguments for K&R functions.  They do not
5287      have a prototype and thus no TYPE_ARG_TYPES.  */
5288   for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5289     ++num;
5290 
5291   /* Check if the function has variadic arguments.  */
5292   for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5293     if (TREE_VALUE (t) == void_type_node)
5294       break;
5295   if (!t)
5296     *is_varargs = true;
5297 
5298   return num;
5299 }
5300 
/* Create the varinfo chain for function DECL, using NAME as the base
   for naming, and return the varinfo created for the function itself.
   The function varinfo is followed on its next chain by sub-variables
   for the clobber set (offset fi_clobbers), the use set (fi_uses),
   the static chain (fi_static_chain, if any), the result (fi_result,
   if any) and the parameters (fi_parm_base + i), in ascending offset
   order.  */

static varinfo_t
create_function_info_for (tree decl, const char *name)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* For varargs functions the number of parameter slots is
       unbounded.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      /* The sub-variables must be chained in ascending offset order.  */
      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi;
      prev_vi = clobbervi;

      asprintf (&tempname, "%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = DECL_CHAIN (arg);
	}
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      asprintf (&tempname, "%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
    }

  return vi;
}
5478 
5479 
5480 /* Return true if FIELDSTACK contains fields that overlap.
5481    FIELDSTACK is assumed to be sorted by offset.  */
5482 
5483 static bool
5484 check_for_overlaps (vec<fieldoff_s> fieldstack)
5485 {
5486   fieldoff_s *fo = NULL;
5487   unsigned int i;
5488   HOST_WIDE_INT lastoffset = -1;
5489 
5490   FOR_EACH_VEC_ELT (fieldstack, i, fo)
5491     {
5492       if (fo->offset == lastoffset)
5493 	return true;
5494       lastoffset = fo->offset;
5495     }
5496   return false;
5497 }
5498 
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL, chained on the head varinfo's next pointer.  Returns the
   head varinfo.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  vec<fieldoff_s> fieldstack = vNULL;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown or non-constant size get a single
     unknown-size varinfo.  */
  if (!declsize
      || !host_integerp (declsize, 1))
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ???  Force us to not use subfields for global initializers
	 in IPA mode.  Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)
	   && DECL_INITIAL (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* A field with unknown size or a negative offset disables
	 field-sensitivity for this variable.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () <= 1
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained via next.  */
  vi = new_var_info (decl, name);
  vi->fullsize = TREE_INT_CST_LOW (declsize);
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = newvi->next)
    {
      const char *newname = "NULL";
      char *tempname;

      /* Only build the descriptive field name when dumping.  */
      if (dump_file)
	{
	  asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
		    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
	  newname = ggc_strdup (tempname);
	  free (tempname);
	}
      newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      if (i + 1 < fieldstack.length ())
	newvi->next = new_var_info (decl, name);
    }

  fieldstack.release ();

  return vi;
}
5612 
/* Create the varinfo (and any field varinfos) for DECL named NAME,
   register it in the tree-to-varinfo map and, for global variables,
   generate initial points-to constraints.  Returns the id of the
   head varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Only variables get initial constraints.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi->next)
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl)
	      && vnode->analyzed)
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
5686 
/* Print out the points-to solution for VAR to FILE.  The exact output
   format is relied upon by testsuite dump scanning, so keep it
   stable.  */

static void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
  bitmap_iterator bi;

  /* Dump the solution for unified vars anyway, this avoids difficulties
     in scanning dumps in the testsuite.  */
  fprintf (file, "%s = { ", vi->name);
  vi = get_varinfo (find (var));
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    fprintf (file, "%s ", get_varinfo (i)->name);
  fprintf (file, "}");

  /* But note when the variable was unified.  */
  if (vi->id != var)
    fprintf (file, " same as %s", vi->name);

  fprintf (file, "\n");
}
5710 
/* Print the points-to solution for VAR to stdout.  Convenience
   wrapper around dump_solution_for_var for use from a debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
5718 
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  Generates constraints for the
   incoming parameters, a by-reference result decl and the static
   chain parameter.  */

static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
         reference build a real representative for the pointed-to object.
	 Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  /* Build a fake decl standing in for the pointed-to object.  */
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* Constraint: the parameter points to the fake object.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  /* Initialize all fields of the fake object that may contain
	     pointers.  */
	  for (; vi; vi = vi->next)
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  for (; p; p = p->next)
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }
}
5800 
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The shared points-to bitmap.  */
  bitmap pt_vars;
  /* Cached hash of PT_VARS, used as the hashtable hash value.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Hashtable of shared points-to bitmaps, keyed by bitmap contents.  */
static htab_t shared_bitmap_table;
5812 
5813 /* Hash function for a shared_bitmap_info_t */
5814 
5815 static hashval_t
5816 shared_bitmap_hash (const void *p)
5817 {
5818   const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5819   return bi->hashcode;
5820 }
5821 
5822 /* Equality function for two shared_bitmap_info_t's. */
5823 
5824 static int
5825 shared_bitmap_eq (const void *p1, const void *p2)
5826 {
5827   const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5828   const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5829   return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5830 }
5831 
5832 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5833    existing instance if there is one, NULL otherwise.  */
5834 
5835 static bitmap
5836 shared_bitmap_lookup (bitmap pt_vars)
5837 {
5838   void **slot;
5839   struct shared_bitmap_info sbi;
5840 
5841   sbi.pt_vars = pt_vars;
5842   sbi.hashcode = bitmap_hash (pt_vars);
5843 
5844   slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5845 				   sbi.hashcode, NO_INSERT);
5846   if (!slot)
5847     return NULL;
5848   else
5849     return ((shared_bitmap_info_t) *slot)->pt_vars;
5850 }
5851 
5852 
/* Add a bitmap to the shared bitmap hashtable.  The caller must have
   verified with shared_bitmap_lookup that PT_VARS is not yet present.  */

static void
shared_bitmap_add (bitmap pt_vars)
{
  void **slot;
  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);

  sbi->pt_vars = pt_vars;
  /* Cache the hash so shared_bitmap_hash never needs to rehash.  */
  sbi->hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
				   sbi->hashcode, INSERT);
  /* A duplicate insertion would leak SBI and indicates a missing
     lookup in the caller.  */
  gcc_assert (!*slot);
  *slot = (void *) sbi;
}
5869 
5870 
/* Set bits in INTO corresponding to the variable uids in solution set FROM.
   As a side effect, set PT->vars_contains_global if any of those
   variables is global.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
5906 
5907 
/* Compute and return the points-to solution for the variable ORIG_VI.
   Results are cached in FINAL_SOLUTIONS keyed on the representative
   variable, so repeated queries are cheap.  */

static struct pt_solution
find_what_var_points_to (varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  void **slot;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  slot = pointer_map_insert (final_solutions, vi);
  if (*slot != NULL)
    return *(struct pt_solution *)*slot;

  /* Allocate the cached solution on the obstack; it lives until
     delete_points_to_sets.  */
  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An identical bitmap already exists; reuse it and drop the
	 contents of the freshly built one.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
5988 
/* Given a pointer variable P, fill in its points-to set.  */

static void
find_what_p_points_to (tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    /* No constraint variable was ever created for LOOKUP_P; leave the
       pointer's points-to information untouched.  */
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (vi);
}
6013 
6014 
/* Query statistics for points-to solutions.  */

static struct {
  /* Number of pt_solution_includes queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  /* Number answered "no alias" (successful disambiguations).  */
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* Likewise for pt_solutions_intersect queries.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6023 
6024 void
6025 dump_pta_stats (FILE *s)
6026 {
6027   fprintf (s, "\nPTA query stats:\n");
6028   fprintf (s, "  pt_solution_includes: "
6029 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6030 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
6031 	   pta_stats.pt_solution_includes_no_alias,
6032 	   pta_stats.pt_solution_includes_no_alias
6033 	   + pta_stats.pt_solution_includes_may_alias);
6034   fprintf (s, "  pt_solutions_intersect: "
6035 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6036 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
6037 	   pta_stats.pt_solutions_intersect_no_alias,
6038 	   pta_stats.pt_solutions_intersect_no_alias
6039 	   + pta_stats.pt_solutions_intersect_may_alias);
6040 }
6041 
6042 
6043 /* Reset the points-to solution *PT to a conservative default
6044    (point to anything).  */
6045 
6046 void
6047 pt_solution_reset (struct pt_solution *pt)
6048 {
6049   memset (pt, 0, sizeof (struct pt_solution));
6050   pt->anything = true;
6051 }
6052 
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that set
   contains global variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_global = vars_contains_global;
}
6065 
/* Set the points-to solution *PT to point only to the variable VAR.  */

void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  /* Allocate a fresh GC'd bitmap containing just VAR's points-to UID.  */
  pt->vars = BITMAP_GGC_ALLOC ();
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_global = is_global_var (var);
}
6076 
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  /* ANYTHING subsumes all other information, so collapse to the
     conservative default and stop.  */
  dest->anything |= src->anything;
  if (dest->anything)
    {
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
6105 
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED solution, check if that
     is empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6131 
/* Return true if the points-to solution *PT points to exactly one
   variable, and store that variable's points-to uid in *UID.  Any of
   the special bits (anything, nonlocal, escaped, null) disqualifies
   the solution from being a singleton.  */

bool
pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
{
  if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
      || pt->null || pt->vars == NULL
      || !bitmap_single_bit_set_p (pt->vars))
    return false;

  *uid = bitmap_first_set_bit (pt->vars);
  return true;
}
6146 
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  /* ESCAPED is used as a placeholder; query the real escaped
     solution recursively.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ???  This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
6171 
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  Worker for pt_solution_includes, which wraps
   this with statistics bookkeeping.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED solution, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6201 
/* Return true if the points-to solution *PT includes the variable
   declaration DECL, updating the PTA query statistics.  */

bool
pt_solution_includes (struct pt_solution *pt, const_tree decl)
{
  bool res = pt_solution_includes_1 (pt, decl);
  if (res)
    ++pta_stats.pt_solution_includes_may_alias;
  else
    ++pta_stats.pt_solution_includes_no_alias;
  return res;
}
6212 
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  Worker for pt_solutions_intersect, which wraps this
   with statistics bookkeeping.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the IPA escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6273 
/* Return true if the points-to solutions PT1 and PT2 have a non-empty
   intersection, updating the PTA query statistics.  */

bool
pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
{
  bool res = pt_solutions_intersect_1 (pt1, pt2);
  if (res)
    ++pta_stats.pt_solutions_intersect_may_alias;
  else
    ++pta_stats.pt_solutions_intersect_no_alias;
  return res;
}
6284 
6285 
/* Dump points-to information to OUTFILE.  With TDF_STATS also dump
   the solver statistics.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars:          %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars:  %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  for (i = 0; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      /* Variables that cannot contain pointers have no interesting
	 solution.  */
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
6319 
6320 
/* Debug points-to information to stderr.  Intended to be called from
   the debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
6328 
6329 
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, together
   with the constraints that relate them.  Their creation order must
   match the *_id constants asserted below.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING. */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately. */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
6501 
/* Initialize things necessary to perform PTA: obstacks, allocation
   pools, the variable and constraint vectors, the lookup maps and the
   always-existing base variables.  */

static void
init_alias_vars (void)
{
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  /* The shared bitmap table owns its entries; free them on deletion.  */
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  /* Cache for find_what_var_points_to results.  */
  final_solutions = pointer_map_create ();
  gcc_obstack_init (&final_solutions_obstack);
}
6532 
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now shrink the successor list to the remaining variables and blow
     away the predecessor bitmaps.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}
6568 
/* Solve the constraint set: build the constraint graph, perform
   offline variable substitution, then run the iterative solver.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the number of variables: the upper half of the graph holds
     the implicit "REF" nodes used during substitution (see
     remove_preds_and_fake_succs).  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point. */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
6641 
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  /* Create constraint variables for the function's parameters and
     static chain.  */
  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  /* Virtual PHIs carry no pointer values.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  */
  FOR_EACH_VEC_ELT (varmap, i, vi)
    if (vi->is_heap_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
6770 
6771 
6772 /* Delete created points-to sets.  */
6773 
6774 static void
6775 delete_points_to_sets (void)
6776 {
6777   unsigned int i;
6778 
6779   htab_delete (shared_bitmap_table);
6780   if (dump_file && (dump_flags & TDF_STATS))
6781     fprintf (dump_file, "Points to sets created:%d\n",
6782 	     stats.points_to_sets_created);
6783 
6784   pointer_map_destroy (vi_for_tree);
6785   pointer_map_destroy (call_stmt_vars);
6786   bitmap_obstack_release (&pta_obstack);
6787   constraints.release ();
6788 
6789   for (i = 0; i < graph->size; i++)
6790     graph->complex[i].release ();
6791   free (graph->complex);
6792 
6793   free (graph->rep);
6794   free (graph->succs);
6795   free (graph->pe);
6796   free (graph->pe_rep);
6797   free (graph->indirect_cycles);
6798   free (graph);
6799 
6800   varmap.release ();
6801   free_alloc_pool (variable_info_pool);
6802   free_alloc_pool (constraint_pool);
6803 
6804   obstack_free (&fake_var_decl_obstack, NULL);
6805 
6806   pointer_map_destroy (final_solutions);
6807   obstack_free (&final_solutions_obstack, NULL);
6808 }
6809 
6810 
6811 /* Compute points-to information for every SSA_NAME pointer in the
6812    current function and compute the transitive closure of escaped
6813    variables to re-initialize the call-clobber states of local variables.  */
6814 
6815 unsigned int
6816 compute_may_aliases (void)
6817 {
6818   if (cfun->gimple_df->ipa_pta)
6819     {
6820       if (dump_file)
6821 	{
6822 	  fprintf (dump_file, "\nNot re-computing points-to information "
6823 		   "because IPA points-to information is available.\n\n");
6824 
6825 	  /* But still dump what we have remaining it.  */
6826 	  dump_alias_info (dump_file);
6827 	}
6828 
6829       return 0;
6830     }
6831 
6832   /* For each pointer P_i, determine the sets of variables that P_i may
6833      point-to.  Compute the reachability set of escaped and call-used
6834      variables.  */
6835   compute_points_to_sets ();
6836 
6837   /* Debugging dumps.  */
6838   if (dump_file)
6839     dump_alias_info (dump_file);
6840 
6841   /* Deallocate memory used by aliasing data structures and the internal
6842      points-to solution.  */
6843   delete_points_to_sets ();
6844 
6845   gcc_assert (!need_ssa_update_p (cfun));
6846 
6847   return 0;
6848 }
6849 
6850 static bool
6851 gate_tree_pta (void)
6852 {
6853   return flag_tree_pta;
6854 }
6855 
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass has no execute function -- the actual
   work is done by the pass manager when it processes the
   TODO_rebuild_alias flag after this (empty) pass has "run".  */

struct gimple_opt_pass pass_build_alias =
{
 {
  GIMPLE_PASS,
  "alias",		    /* name */
  OPTGROUP_NONE,            /* optinfo_flags */
  gate_tree_pta,	    /* gate */
  NULL,                     /* execute */
  NULL,                     /* sub */
  NULL,                     /* next */
  0,                        /* static_pass_number */
  TV_NONE,                  /* tv_id */
  PROP_cfg | PROP_ssa,      /* properties_required */
  0,			    /* properties_provided */
  0,                        /* properties_destroyed */
  0,                        /* todo_flags_start */
  TODO_rebuild_alias        /* todo_flags_finish */
 }
};
6878 
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Like pass_build_alias it carries no execute
   function; it differs only in name ("ealias") and is scheduled at a
   different point in the pass pipeline.  */

struct gimple_opt_pass pass_build_ealias =
{
 {
  GIMPLE_PASS,
  "ealias",		    /* name */
  OPTGROUP_NONE,            /* optinfo_flags */
  gate_tree_pta,	    /* gate */
  NULL,                     /* execute */
  NULL,                     /* sub */
  NULL,                     /* next */
  0,                        /* static_pass_number */
  TV_NONE,                  /* tv_id */
  PROP_cfg | PROP_ssa,      /* properties_required */
  0,			    /* properties_provided */
  0,                        /* properties_destroyed */
  0,                        /* todo_flags_start */
  TODO_rebuild_alias        /* todo_flags_finish */
 }
};
6901 
6902 
6903 /* Return true if we should execute IPA PTA.  */
6904 static bool
6905 gate_ipa_pta (void)
6906 {
6907   return (optimize
6908 	  && flag_ipa_pta
6909 	  /* Don't bother doing anything if the program has errors.  */
6910 	  && !seen_error ());
6911 }
6912 
/* IPA PTA solutions for ESCAPED.  Overwritten with the real solution
   in ipa_pta_execute; the static initializer sets only the leading
   flag (NOTE(review): presumably pt_solution.anything -- verify
   against the struct pt_solution declaration), making the solution
   maximally conservative until IPA PTA has actually run.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, NULL };
6916 
6917 /* Associate node with varinfo DATA. Worker for
6918    cgraph_for_node_and_aliases.  */
6919 static bool
6920 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
6921 {
6922   if (node->alias || node->thunk.thunk_p)
6923     insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
6924   return false;
6925 }
6926 
/* Execute the driver for IPA PTA.  Builds constraints for every
   function body and global initializer in the unit, solves them once,
   and then writes the resulting points-to / call-use / call-clobber
   solutions back into each function.  Returns 0 (no TODOs).  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *var;
  int from;  /* Index of the first constraint not yet dumped.  */

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_symtab (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Build the constraints.  First create function infos so that
     call sites processed later can refer to their callees.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
         visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      gcc_assert (!node->clone_of);

      vi = create_function_info_for (node->symbol.decl,
			             alias_get_name (node->symbol.decl));
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias)
	continue;

      get_vi_for_tree (var->symbol.decl);
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  from = constraints.length ();

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", cgraph_node_name (node));
	  if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->symbol.decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->symbol.decl);
      push_cfun (func);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      if (node->symbol.used_from_other_partition
	  || node->symbol.externally_visible
	  || node->symbol.force_output)
	{
	  intra_create_variable_infos ();

	  /* We also need to make function return values escape.  Nothing
	     escapes by returning from main though.  */
	  if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
	    {
	      varinfo_t fi, rvi;
	      fi = lookup_vi_for_tree (node->symbol.decl);
	      rvi = first_vi_for_offset (fi, fi_result);
	      if (rvi && rvi->offset == fi_result)
		{
		  /* Add the constraint ESCAPED = ESCAPED U result.  */
		  struct constraint_expr includes;
		  struct constraint_expr var;
		  includes.var = escaped_id;
		  includes.offset = 0;
		  includes.type = SCALAR;
		  var.var = rvi->id;
		  var.offset = 0;
		  var.type = SCALAR;
		  process_constraint (new_constraint (includes, var));
		}
	    }
	}

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	      find_func_clobbers (stmt);
	    }
	}

      pop_cfun ();

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = constraints.length ();
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the global points-to sets for ESCAPED.
     ???  Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node))
	continue;

      fn = DECL_STRUCT_FUNCTION (node->symbol.decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.
	 These are shared by all callers of this function.  */
      fi = lookup_vi_for_tree (node->symbol.decl);
      gcc_assert (fi->is_fn_info);
      clobbers
	= find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
      uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
      for (e = node->callers; e; e = e->next_caller)
	{
	  if (!e->call_stmt)
	    continue;

	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  A direct
		 call with no function info was not analyzed above.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ???  ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ???  ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}

	      /* Handle indirect calls.  */
	      if (!decl
		  && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ???  We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      /* Mark the function so compute_may_aliases will not redo the
	 (weaker) intraprocedural analysis over this IPA result.  */
      fn->gimple_df->ipa_pta = true;
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
7256 
/* The IPA points-to analysis pass; gated on -fipa-pta, driven by
   ipa_pta_execute.  */

struct simple_ipa_opt_pass pass_ipa_pta =
{
 {
  SIMPLE_IPA_PASS,
  "pta",		                /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_ipa_pta,			/* gate */
  ipa_pta_execute,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PTA,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa                       /* todo_flags_finish */
 }
};
7276