/* Alias analysis for trees.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "timevar.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "tree-ssa-structalias.h"
#include "convert.h"
#include "params.h"
#include "ipa-type-escape.h"
#include "vec.h"
#include "bitmap.h"
#include "vecprim.h"
#include "pointer-set.h"

/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
   aliasing.  */
static bitmap_obstack alias_obstack;

/* 'true' after aliases have been computed (see compute_may_aliases).  */
bool aliases_computed_p;

/* Structure to map a variable to its alias set and keep track of the
   virtual operands that will be needed to represent it.  */
struct alias_map_d
{
  /* Variable and its alias set.  */
  tree var;
  HOST_WIDE_INT set;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of VAR.  */
  long total_alias_vops;

  /* Nonzero if the aliases for this memory tag have been grouped
     already.  Used in group_aliases.  */
  unsigned int grouped_p : 1;

  /* Set of variables aliased with VAR.  This is the exact same
     information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
     bitmap form to speed up alias grouping.  */
  bitmap may_aliases;
};


/* Counters used to display statistics on alias analysis.  */
struct alias_stats_d
{
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;
  unsigned int simple_queries;
  unsigned int simple_resolved;
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};


/* Local variables.  */
static struct alias_stats_d alias_stats;

/* Local functions.  */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void finalize_ref_all_pointers (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static void create_global_var (void);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);

/* Global declarations.  */

/* Call clobbered variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-clobbered.  */
bitmap call_clobbered_vars;

/* Addressable variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) has had its address taken.  Note that
   CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
   addressable variable is not necessarily call-clobbered (e.g., a
   local addressable whose address does not escape) and not all
   call-clobbered variables are addressable (e.g., a local static
   variable).  */
bitmap addressable_vars;

/* When the program has too many call-clobbered variables and call-sites,
   this variable is used to represent the clobbering effects of function
   calls.  In these cases, all the call clobbered variables in the program
   are forced to alias this variable.  This reduces compile times by not
   having to keep track of too many V_MAY_DEF expressions at call sites.  */
tree global_var;
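
/* For illustration (a hypothetical scenario): a call in a function with
   many call-clobbered locals would otherwise need one V_MAY_DEF per
   clobbered variable at the call site; once .GLOBAL_VAR exists and the
   call-clobbered variables are made to alias it, the same call needs
   only a single V_MAY_DEF of .GLOBAL_VAR.  */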

/* qsort comparison function to sort type/name tags by DECL_UID.  */

static int
sort_tags_by_id (const void *pa, const void *pb)
{
  tree a = *(tree *)pa;
  tree b = *(tree *)pb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Initialize WORKLIST to contain those memory tags that are marked call
   clobbered.  Initialize WORKLIST2 to contain the reasons these
   memory tags escaped.  */

static void
init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
                                  VEC (int, heap) **worklist2)
{
  referenced_var_iterator rvi;
  tree curr;

  FOR_EACH_REFERENCED_VAR (curr, rvi)
    {
      if (MTAG_P (curr) && is_call_clobbered (curr))
        {
          VEC_safe_push (tree, heap, *worklist, curr);
          VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
        }
    }
}

/* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
   ALIAS is not already marked call clobbered, and is a memory
   tag.  */

static void
add_to_worklist (tree alias, VEC (tree, heap) **worklist,
                 VEC (int, heap) **worklist2,
                 int reason)
{
  if (MTAG_P (alias) && !is_call_clobbered (alias))
    {
      VEC_safe_push (tree, heap, *worklist, alias);
      VEC_safe_push (int, heap, *worklist2, reason);
    }
}

/* Mark aliases of TAG as call clobbered, and place any tags on the
   alias list that were not already call clobbered on WORKLIST.  */

static void
mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
                             VEC (int, heap) **worklist2)
{
  unsigned int i;
  VEC (tree, gc) *ma;
  tree entry;
  var_ann_t ta = var_ann (tag);

  if (!MTAG_P (tag))
    return;
  ma = may_aliases (tag);
  if (!ma)
    return;

  for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
    {
      if (!unmodifiable_var_p (entry))
        {
          add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
          mark_call_clobbered (entry, ta->escape_mask);
        }
    }
}

/* Tags containing global vars need to be marked as global.
   Tags containing call clobbered vars need to be marked as call
   clobbered.  */

static void
compute_tag_properties (void)
{
  referenced_var_iterator rvi;
  tree tag;
  bool changed = true;
  VEC (tree, heap) *taglist = NULL;

  FOR_EACH_REFERENCED_VAR (tag, rvi)
    {
      if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
        continue;
      VEC_safe_push (tree, heap, taglist, tag);
    }

  /* We sort the taglist by DECL_UID, for two reasons.
     1. To get a sequential ordering to make the bitmap accesses
        faster.
     2. Because of the way we compute aliases, it's more likely that
        an earlier tag is included in a later tag, and this will reduce
        the number of iterations.

     If we had a real tag graph, we would just topo-order it and be
     done with it.  */
  qsort (VEC_address (tree, taglist),
         VEC_length (tree, taglist),
         sizeof (tree),
         sort_tags_by_id);

  /* Go through each tag not marked as global, and if it aliases
     global vars, mark it global.

     If the tag contains call clobbered vars, mark it call
     clobbered.

     This loop iterates because tags may appear in the may-aliases
     list of other tags when we group.  */

  while (changed)
    {
      unsigned int k;

      changed = false;
      for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
        {
          VEC (tree, gc) *ma;
          unsigned int i;
          tree entry;
          bool tagcc = is_call_clobbered (tag);
          bool tagglobal = MTAG_GLOBAL (tag);

          if (tagcc && tagglobal)
            continue;

          ma = may_aliases (tag);
          if (!ma)
            continue;

          for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
            {
              /* Call clobbered entries cause the tag to be marked
                 call clobbered.  */
              if (!tagcc && is_call_clobbered (entry))
                {
                  mark_call_clobbered (tag, var_ann (entry)->escape_mask);
                  tagcc = true;
                  changed = true;
                }

              /* Global vars cause the tag to be marked global.  */
              if (!tagglobal && is_global_var (entry))
                {
                  MTAG_GLOBAL (tag) = true;
                  changed = true;
                  tagglobal = true;
                }

              /* Early exit once both global and cc are set, since the
                 loop can't do any more than that.  */
              if (tagcc && tagglobal)
                break;
            }
        }
    }
  VEC_free (tree, heap, taglist);
}
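
/* As a small worked illustration of the iteration above (hypothetical
   tags T1 and T2): if T2 may-aliases a global variable G, the first pass
   marks T2 global; if T1 in turn may-aliases T2, a later pass sees a
   global entry in T1's alias list and marks T1 global as well, which is
   why the loop repeats until nothing changes.  */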

/* Set up the initial variable clobbers and globalness.
   When this function completes, only tags whose aliases need to be
   clobbered will be set clobbered.  Tags clobbered because they
   contain call clobbered vars are handled in compute_tag_properties.  */

static void
set_initial_properties (struct alias_info *ai)
{
  unsigned int i;
  referenced_var_iterator rvi;
  tree var;
  tree ptr;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_global_var (var)
          && (!var_can_have_subvars (var)
              || get_subvars_for_var (var) == NULL))
        {
          if (!unmodifiable_var_p (var))
            mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
        }
      else if (TREE_CODE (var) == PARM_DECL
               && default_def (var)
               && POINTER_TYPE_P (TREE_TYPE (var)))
        {
          tree def = default_def (var);
          get_ptr_info (def)->value_escapes_p = 1;
          get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
        }
    }

  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));

      if (pi->value_escapes_p)
        {
          /* If PTR escapes then its associated memory tags and
             pointed-to variables are call-clobbered.  */
          if (pi->name_mem_tag)
            mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);

          if (v_ann->symbol_mem_tag)
            mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);

          if (pi->pt_vars)
            {
              bitmap_iterator bi;
              unsigned int j;
              EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
                if (!unmodifiable_var_p (referenced_var (j)))
                  mark_call_clobbered (referenced_var (j), pi->escape_mask);
            }
        }

      /* If the name tag is call clobbered, so is the symbol tag
         associated with the base VAR_DECL.  */
      if (pi->name_mem_tag
          && v_ann->symbol_mem_tag
          && is_call_clobbered (pi->name_mem_tag))
        mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);

      /* Name tags and symbol tags that we don't know where they point
         to, might point to global memory, and thus, are clobbered.

         FIXME:  This is not quite right.  They should only be
         clobbered if value_escapes_p is true, regardless of whether
         they point to global memory or not.
         So removing this code and fixing all the bugs would be nice.
         It is the cause of a bunch of clobbering.  */
      if ((pi->pt_global_mem || pi->pt_anything)
          && pi->is_dereferenced && pi->name_mem_tag)
        {
          mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
          MTAG_GLOBAL (pi->name_mem_tag) = true;
        }

      if ((pi->pt_global_mem || pi->pt_anything)
          && pi->is_dereferenced
          && v_ann->symbol_mem_tag)
        {
          mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
          MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
        }
    }
}


/* This variable is set to true if we are updating the used alone
   information for SMTs, or are in a pass that is going to break it
   temporarily.  */
bool updating_used_alone;

/* Compute which variables need to be marked call clobbered because
   their tag is call clobbered, and which tags need to be marked
   global because they contain global variables.  */

static void
compute_call_clobbered (struct alias_info *ai)
{
  VEC (tree, heap) *worklist = NULL;
  VEC (int, heap) *worklist2 = NULL;

  set_initial_properties (ai);
  init_transitive_clobber_worklist (&worklist, &worklist2);
  while (VEC_length (tree, worklist) != 0)
    {
      tree curr = VEC_pop (tree, worklist);
      int reason = VEC_pop (int, worklist2);

      mark_call_clobbered (curr, reason);
      mark_aliases_call_clobbered (curr, &worklist, &worklist2);
    }
  VEC_free (tree, heap, worklist);
  VEC_free (int, heap, worklist2);
  compute_tag_properties ();
}


/* Helper for recalculate_used_alone.  Return a conservatively correct
   answer as to whether STMT may make a store on the LHS to SYM.  */

static bool
lhs_may_store_to (tree stmt, tree sym ATTRIBUTE_UNUSED)
{
  tree lhs = TREE_OPERAND (stmt, 0);

  lhs = get_base_address (lhs);

  if (!lhs)
    return false;

  if (TREE_CODE (lhs) == SSA_NAME)
    return false;
  /* We could do better here by looking at the type tag of LHS, but it
     is unclear whether this is worth it.  */
  return true;
}

/* Recalculate the used_alone information for SMTs.  */

void
recalculate_used_alone (void)
{
  VEC (tree, heap) *calls = NULL;
  block_stmt_iterator bsi;
  basic_block bb;
  tree stmt;
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  /* First, reset all the SMT used alone bits to zero.  */
  updating_used_alone = true;
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
      {
        SMT_OLD_USED_ALONE (var) = SMT_USED_ALONE (var);
        SMT_USED_ALONE (var) = 0;
      }

  /* Walk all the statements.
     Calls get put into a list of statements to update, since we will
     need to update operands on them if we make any changes.
     If we see a bare use of a SMT anywhere in a real virtual use or virtual
     def, mark the SMT as used alone, and for renaming.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          bool iscall = false;
          ssa_op_iter iter;

          stmt = bsi_stmt (bsi);

          if (TREE_CODE (stmt) == CALL_EXPR
              || (TREE_CODE (stmt) == MODIFY_EXPR
                  && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
            {
              iscall = true;
              VEC_safe_push (tree, heap, calls, stmt);
            }

          FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
                                     SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
            {
              tree svar = var;

              if (TREE_CODE (var) == SSA_NAME)
                svar = SSA_NAME_VAR (var);

              if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
                {
                  /* We only care about the LHS on calls.  */
                  if (iscall && !lhs_may_store_to (stmt, svar))
                    continue;

                  if (!SMT_USED_ALONE (svar))
                    {
                      SMT_USED_ALONE (svar) = true;

                      /* Only need to mark for renaming if it wasn't
                         used alone before.  */
                      if (!SMT_OLD_USED_ALONE (svar))
                        mark_sym_for_renaming (svar);
                    }
                }
            }
        }
    }

  /* Update the operands on all the calls we saw.  */
  if (calls)
    {
      for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
        update_stmt (stmt);
    }

  /* We need to mark SMT's that are no longer used for renaming so the
     symbols go away, or else verification will be angry with us, even
     though they are dead.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
      {
        if (SMT_OLD_USED_ALONE (var) && !SMT_USED_ALONE (var))
          mark_sym_for_renaming (var);
      }

  VEC_free (tree, heap, calls);
  updating_used_alone = false;
}

/* Compute may-alias information for every variable referenced in function
   FNDECL.

   Alias analysis proceeds in 3 main phases:

   1- Points-to and escape analysis.

   This phase walks the use-def chains in the SSA web looking for three
   things:

        * Assignments of the form P_i = &VAR
        * Assignments of the form P_i = malloc()
        * Pointers and ADDR_EXPR that escape the current function.

   The concept of 'escaping' is the same one used in the Java world.  When
   a pointer or an ADDR_EXPR escapes, it means that it has been exposed
   outside of the current function.  So, assignment to global variables,
   function arguments and returning a pointer are all escape sites, as are
   conversions between pointers and integers.

   This is where we are currently limited.  Since not everything is renamed
   into SSA, we lose track of escape properties when a pointer is stashed
   inside a field in a structure, for instance.  In those cases, we are
   assuming that the pointer does escape.

   We use escape analysis to determine whether a variable is
   call-clobbered.  Simply put, if an ADDR_EXPR escapes, then the variable
   is call-clobbered.  If a pointer P_i escapes, then all the variables
   pointed-to by P_i (and its memory tag) also escape.

   2- Compute flow-sensitive aliases

   We have two classes of memory tags.  Memory tags associated with the
   pointed-to data type of the pointers in the program are called
   "symbol memory tags" (SMT).  The other class are those associated
   with SSA_NAMEs, called "name memory tags" (NMT).  The basic idea is that
   when adding operands for an INDIRECT_REF *P_i, we will first check
   whether P_i has a name tag; if it does, we use it, because that will have
   more precise aliasing information.  Otherwise, we use the standard symbol
   tag.

   In this phase, we go through all the pointers we found in points-to
   analysis and create alias sets for the name memory tags associated with
   each pointer P_i.  If P_i escapes, we mark call-clobbered the variables
   it points to and its tag.


   3- Compute flow-insensitive aliases

   This pass will compare the alias set of every symbol memory tag and
   every addressable variable found in the program.  Given a symbol
   memory tag SMT and an addressable variable V, if the alias sets of
   SMT and V conflict (as computed by may_alias_p), then V is marked
   as an alias tag and added to the alias set of SMT.

   For instance, consider the following function:

            foo (int i)
            {
              int *p, a, b;

              if (i > 10)
                p = &a;
              else
                p = &b;

              *p = 3;
              a = b + 2;
              return *p;
            }

   After aliasing analysis has finished, the symbol memory tag for pointer
   'p' will have two aliases, namely variables 'a' and 'b'.  Every time
   pointer 'p' is dereferenced, we want to mark the operation as a
   potential reference to 'a' and 'b'.

            foo (int i)
            {
              int *p, a, b;

              if (i_2 > 10)
                p_4 = &a;
              else
                p_6 = &b;
              # p_1 = PHI <p_4(1), p_6(2)>;

              # a_7 = V_MAY_DEF <a_3>;
              # b_8 = V_MAY_DEF <b_5>;
              *p_1 = 3;

              # a_9 = V_MAY_DEF <a_7>
              # VUSE <b_8>
              a_9 = b_8 + 2;

              # VUSE <a_9>;
              # VUSE <b_8>;
              return *p_1;
            }

   In certain cases, the list of may aliases for a pointer may grow too
   large.  This may cause an explosion in the number of virtual operands
   inserted in the code, resulting in increased memory consumption and
   compilation time.

   When the number of virtual operands needed to represent aliased
   loads and stores grows too large (configurable with @option{--param
   max-aliased-vops}), alias sets are grouped to avoid severe
   compile-time slow downs and memory consumption.  See group_aliases.  */

static unsigned int
compute_may_aliases (void)
{
  struct alias_info *ai;

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if it is passed as a
     function call argument).  */
  compute_points_to_sets (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  Note that this pass needs to be done before flow
     insensitive analysis because it uses the points-to information
     gathered before to mark call-clobbered symbol tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* Compute call clobbering information.  */
  compute_call_clobbered (ai);

  /* Determine if we need to enable alias grouping.  */
  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
    group_aliases (ai);

  /* If the program has too many call-clobbered variables and/or function
     calls, create .GLOBAL_VAR and use it to model call-clobbering
     semantics at call sites.  This reduces the number of virtual operands
     considerably, improving compile times at the expense of lost
     aliasing precision.  */
  maybe_create_global_var (ai);

  /* If the program contains ref-all pointers, finalize may-alias information
     for them.  This pass needs to be run after call-clobbering information
     has been computed.  */
  if (ai->ref_all_symbol_mem_tag)
    finalize_ref_all_pointers (ai);

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_referenced_vars (dump_file);
      if (dump_flags & TDF_STATS)
        dump_alias_stats (dump_file);
      dump_points_to_info (dump_file);
      dump_alias_info (dump_file);
    }

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  updating_used_alone = true;
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          {
            update_stmt_if_modified (bsi_stmt (bsi));
          }
      }
  }
  recalculate_used_alone ();
  updating_used_alone = false;
  return 0;
}


struct tree_opt_pass pass_may_alias =
{
  "alias",                              /* name */
  NULL,                                 /* gate */
  compute_may_aliases,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_MAY_ALIAS,                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  PROP_alias,                           /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,                /* todo_flags_finish */
  0                                     /* letter */
};


/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned count;
};


/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct count_ptr_d *count_p = (struct count_ptr_d *) data;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
    count_p->count++;

  return NULL_TREE;
}


/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The two counts are stored in *NUM_USES_P and
   *NUM_DEREFS_P respectively.  *IS_STORE_P is set to 'true' if at
   least one of those dereferences is a store operation.  */

void
count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
                       unsigned *num_derefs_p, bool *is_store)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_derefs_p = 0;
  *is_store = false;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      || (TREE_CODE (stmt) == RETURN_EXPR
          && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
      || TREE_CODE (stmt) == ASM_EXPR
      || TREE_CODE (stmt) == CALL_EXPR)
    {
      tree lhs, rhs;

      if (TREE_CODE (stmt) == MODIFY_EXPR)
        {
          lhs = TREE_OPERAND (stmt, 0);
          rhs = TREE_OPERAND (stmt, 1);
        }
      else if (TREE_CODE (stmt) == RETURN_EXPR)
        {
          tree e = TREE_OPERAND (stmt, 0);
          lhs = TREE_OPERAND (e, 0);
          rhs = TREE_OPERAND (e, 1);
        }
      else if (TREE_CODE (stmt) == ASM_EXPR)
        {
          lhs = ASM_OUTPUTS (stmt);
          rhs = ASM_INPUTS (stmt);
        }
      else
        {
          lhs = NULL_TREE;
          rhs = stmt;
        }

      if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
        {
          struct count_ptr_d count;
          count.ptr = ptr;
          count.count = 0;
          walk_tree (&lhs, count_ptr_derefs, &count, NULL);
          *is_store = true;
          *num_derefs_p = count.count;
        }

      if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
        {
          struct count_ptr_d count;
          count.ptr = ptr;
          count.count = 0;
          walk_tree (&rhs, count_ptr_derefs, &count, NULL);
          *num_derefs_p += count.count;
        }
    }

  gcc_assert (*num_uses_p >= *num_derefs_p);
}
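
/* For the example mentioned in the comment above, '*x_1 = foo (x_1, *x_1)'
   with PTR == x_1: the operand scan sees three uses of x_1, the walk over
   the LHS finds one dereference (setting *IS_STORE), and the walk over the
   RHS finds another, so the results are *NUM_USES_P == 3,
   *NUM_DEREFS_P == 2 and *IS_STORE == true.  */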

/* Initialize the data structures used for alias analysis.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  referenced_var_iterator rvi;
  tree var;

  bitmap_obstack_initialize (&alias_obstack);
  ai = XCNEW (struct alias_info);
  ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (ai->ssa_names_visited);
  ai->processed_ptrs = VEC_alloc (tree, heap, 50);
  ai->written_vars = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);

  /* If aliases have been computed before, clear existing information.  */
  if (aliases_computed_p)
    {
      unsigned i;

      /* Similarly, clear the set of addressable variables.  In this
         case, we can just clear the set because addressability is
         only computed here.  */
      bitmap_clear (addressable_vars);

      /* Clear flow-insensitive alias information from each symbol.  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
        {
          var_ann_t ann = var_ann (var);

          ann->is_aliased = 0;
          ann->may_aliases = NULL;
          NUM_REFERENCES_CLEAR (ann);

          /* Since we are about to re-discover call-clobbered
             variables, clear the call-clobbered flag.  Variables that
             are intrinsically call-clobbered (globals, local statics,
             etc) will not be marked by the aliasing code, so we can't
             remove them from CALL_CLOBBERED_VARS.

             NB: STRUCT_FIELDS are still call clobbered if they are for
             a global variable, so we *don't* clear their call clobberedness
             just because they are tags, though we will clear it if they
             aren't for global variables.  */
          if (TREE_CODE (var) == NAME_MEMORY_TAG
              || TREE_CODE (var) == SYMBOL_MEMORY_TAG
              || !is_global_var (var))
            clear_call_clobbered (var);
        }

      /* Clear flow-sensitive points-to information from each SSA name.  */
      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);

          if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
            continue;

          if (SSA_NAME_PTR_INFO (name))
            {
              struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

              /* Clear all the flags but keep the name tag to
                 avoid creating new temporaries unnecessarily.  If
                 this pointer is found to point to a subset or
                 superset of its former points-to set, then a new
                 tag will need to be created in create_name_tags.  */
              pi->pt_anything = 0;
              pi->pt_null = 0;
              pi->value_escapes_p = 0;
              pi->is_dereferenced = 0;
              if (pi->pt_vars)
                bitmap_clear (pi->pt_vars);
            }
        }
    }

  /* Next time, we will need to reset alias information.  */
  aliases_computed_p = true;

  return ai;
}


/* Deallocate memory used by alias analysis.  */

static void
delete_alias_info (struct alias_info *ai)
{
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  sbitmap_free (ai->ssa_names_visited);
  VEC_free (tree, heap, ai->processed_ptrs);

  for (i = 0; i < ai->num_addressable_vars; i++)
    free (ai->addressable_vars[i]);

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      var_ann_t ann = var_ann (var);
      NUM_REFERENCES_CLEAR (ann);
    }

  free (ai->addressable_vars);

  for (i = 0; i < ai->num_pointers; i++)
    free (ai->pointers[i]);
  free (ai->pointers);

  BITMAP_FREE (ai->written_vars);
  BITMAP_FREE (ai->dereferenced_ptrs_store);
  BITMAP_FREE (ai->dereferenced_ptrs_load);
  bitmap_obstack_release (&alias_obstack);
  free (ai);

  delete_points_to_sets ();
}

/* Used for hashing to identify pointer infos with identical
   pt_vars bitmaps.  */
static int
eq_ptr_info (const void *p1, const void *p2)
{
  const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
  const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
  return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
}

static hashval_t
ptr_info_hash (const void *p)
{
  const struct ptr_info_def *n = (const struct ptr_info_def *) p;
  return bitmap_hash (n->pt_vars);
}

/* Create name tags for all the pointers that have been dereferenced.
   We only create a name tag for a pointer P if P is found to point to
   a set of variables (so that we can alias them to *P) or if it is
   the result of a call to malloc (which means that P cannot point to
   anything else nor alias any other variable).

   If two pointers P and Q point to the same set of variables, they
   are assigned the same name tag.  */

static void
create_name_tags (void)
{
  size_t i;
  VEC (tree, heap) *with_ptvars = NULL;
  tree ptr;
  htab_t ptr_hash;

  /* Collect the list of pointers with a non-empty points to set.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (!ptr
          || !POINTER_TYPE_P (TREE_TYPE (ptr))
          || !SSA_NAME_PTR_INFO (ptr))
        continue;

      pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
        {
          /* No name tags for pointers that have not been
             dereferenced or point to an arbitrary location.  */
          pi->name_mem_tag = NULL_TREE;
          continue;
        }

      /* Set pt_anything on the pointers without pt_vars filled in so
         that they are assigned a symbol tag.  */
      if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
        VEC_safe_push (tree, heap, with_ptvars, ptr);
      else
        set_pt_anything (ptr);
    }

  /* If we didn't find any pointers with pt_vars set, we're done.  */
  if (!with_ptvars)
    return;

  ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
  /* Now go through the pointers with pt_vars, and find a name tag
     with the same pt_vars as this pointer, or create one if one
     doesn't exist.  */
  for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      tree old_name_tag = pi->name_mem_tag;
      struct ptr_info_def **slot;

      /* If PTR points to a set of variables, check if we don't
         have another pointer Q with the same points-to set before
         creating a tag.  If so, use Q's tag instead of creating a
         new one.

         This is important for not creating unnecessary symbols
         and also for copy propagation.  If we ever need to
         propagate PTR into Q or vice-versa, we would run into
         problems if they both had different name tags because
         they would have different SSA version numbers (which
         would force us to take the name tags in and out of SSA).  */

      slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
      if (*slot)
        pi->name_mem_tag = (*slot)->name_mem_tag;
      else
        {
          *slot = pi;
          /* If we didn't find a pointer with the same points-to set
             as PTR, create a new name tag if needed.  */
          if (pi->name_mem_tag == NULL_TREE)
            pi->name_mem_tag = get_nmt_for (ptr);
        }

      /* If the new name tag computed for PTR is different than
         the old name tag that it used to have, then the old tag
         needs to be removed from the IL, so we mark it for
         renaming.  */
      if (old_name_tag && old_name_tag != pi->name_mem_tag)
        mark_sym_for_renaming (old_name_tag);

      TREE_THIS_VOLATILE (pi->name_mem_tag)
        |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));

      /* Mark the new name tag for renaming.  */
      mark_sym_for_renaming (pi->name_mem_tag);
    }
  htab_delete (ptr_hash);

  VEC_free (tree, heap, with_ptvars);
}
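
/* Illustration of the sharing above (hypothetical SSA names): if p_1 and
   q_2 both have pt_vars == { a, b }, whichever is processed first creates
   (or keeps) a name tag, and the hash table lookup makes the second one
   reuse that same tag, so copy propagation between p_1 and q_2 never has
   to swap name tags in and out of SSA form.  */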


/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
   the name memory tag (NMT) associated with P_i.  If P_i escapes, then its
   name tag and the variables it points-to are call-clobbered.  Finally, if
   P_i escapes and we could not determine where it points to, then all the
   variables in the same alias set as *P_i are marked call-clobbered.  This
   is necessary because we must assume that P_i may take the address of any
   variable in the same alias set.  */

static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;
  tree ptr;

  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      if (!find_what_p_points_to (ptr))
        set_pt_anything (ptr);
    }

  create_name_tags ();

  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      unsigned j;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
      bitmap_iterator bi;


      /* Set up aliasing information for PTR's name memory tag (if it has
         one).  Note that only pointers that have been dereferenced will
         have a name memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
        EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
          {
            add_may_alias (pi->name_mem_tag, referenced_var (j));
            add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
          }
    }
}
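
/* For instance, with the pointer 'p' from the example in the comment
   before compute_may_aliases: if p_1 has a name tag and pt_vars == { a, b },
   the loop above adds 'a' and 'b' both to the may-alias set of p_1's name
   tag and to the may-alias set of p's symbol memory tag.  */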


/* Compute type-based alias sets.  Traverse all the pointers and
   addressable variables found in setup_pointers_and_addressables.

   For every pointer P in AI->POINTERS and addressable variable V in
   AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
   memory tag (SMT) if their alias sets conflict.  V is then marked as
   an alias tag so that the operand scanner knows that statements
   containing V have aliased operands.  */

static void
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Initialize counter for the total number of virtual operands that
     aliasing will introduce.  When AI->TOTAL_ALIAS_VOPS goes beyond the
     threshold set by --param max-aliased-vops, we enable alias
1171*404b540aSrobert grouping. */
1172*404b540aSrobert ai->total_alias_vops = 0;
1173*404b540aSrobert
1174*404b540aSrobert /* For every pointer P, determine which addressable variables may alias
1175*404b540aSrobert with P's symbol memory tag. */
1176*404b540aSrobert for (i = 0; i < ai->num_pointers; i++)
1177*404b540aSrobert {
1178*404b540aSrobert size_t j;
1179*404b540aSrobert struct alias_map_d *p_map = ai->pointers[i];
1180*404b540aSrobert tree tag = var_ann (p_map->var)->symbol_mem_tag;
1181*404b540aSrobert var_ann_t tag_ann = var_ann (tag);
1182*404b540aSrobert tree var;
1183*404b540aSrobert
1184*404b540aSrobert /* Call-clobbering information is not finalized yet at this point. */
1185*404b540aSrobert if (PTR_IS_REF_ALL (p_map->var))
1186*404b540aSrobert continue;
1187*404b540aSrobert
1188*404b540aSrobert p_map->total_alias_vops = 0;
1189*404b540aSrobert p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
1190*404b540aSrobert
1191*404b540aSrobert /* Add any pre-existing may_aliases to the bitmap used to represent
1192*404b540aSrobert TAG's alias set in case we need to group aliases. */
1193*404b540aSrobert for (j = 0; VEC_iterate (tree, tag_ann->may_aliases, j, var); ++j)
1194*404b540aSrobert bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1195*404b540aSrobert
1196*404b540aSrobert for (j = 0; j < ai->num_addressable_vars; j++)
1197*404b540aSrobert {
1198*404b540aSrobert struct alias_map_d *v_map;
1199*404b540aSrobert var_ann_t v_ann;
1200*404b540aSrobert bool tag_stored_p, var_stored_p;
1201*404b540aSrobert
1202*404b540aSrobert v_map = ai->addressable_vars[j];
1203*404b540aSrobert var = v_map->var;
1204*404b540aSrobert v_ann = var_ann (var);
1205*404b540aSrobert
1206*404b540aSrobert /* Skip memory tags and variables that have never been
1207*404b540aSrobert written to. We also need to check if the variables are
1208*404b540aSrobert call-clobbered because they may be overwritten by
1209*404b540aSrobert function calls.
1210*404b540aSrobert
1211*404b540aSrobert Note that this effectively performs random access into
1212*404b540aSrobert the sparse bitmap, which can be highly inefficient.
1213*404b540aSrobert So we first check the call_clobbered status of the
1214*404b540aSrobert tag and variable before querying the bitmap. */
1215*404b540aSrobert tag_stored_p = is_call_clobbered (tag)
1216*404b540aSrobert || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1217*404b540aSrobert var_stored_p = is_call_clobbered (var)
1218*404b540aSrobert || bitmap_bit_p (ai->written_vars, DECL_UID (var));
1219*404b540aSrobert if (!tag_stored_p && !var_stored_p)
1220*404b540aSrobert continue;
1221*404b540aSrobert
1222*404b540aSrobert if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1223*404b540aSrobert {
1224*404b540aSrobert size_t num_tag_refs, num_var_refs;
1225*404b540aSrobert
1226*404b540aSrobert num_tag_refs = NUM_REFERENCES (tag_ann);
1227*404b540aSrobert num_var_refs = NUM_REFERENCES (v_ann);
1228*404b540aSrobert
1229*404b540aSrobert /* Add VAR to TAG's may-aliases set. */
1230*404b540aSrobert
1231*404b540aSrobert /* We should never have a var with subvars here, because
1232*404b540aSrobert they shouldn't get into the set of addressable vars */
1233*404b540aSrobert gcc_assert (!var_can_have_subvars (var)
1234*404b540aSrobert || get_subvars_for_var (var) == NULL);
1235*404b540aSrobert
1236*404b540aSrobert add_may_alias (tag, var);
1237*404b540aSrobert /* Update the bitmap used to represent TAG's alias set
1238*404b540aSrobert in case we need to group aliases. */
1239*404b540aSrobert bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1240*404b540aSrobert
1241*404b540aSrobert /* Update the total number of virtual operands due to
1242*404b540aSrobert aliasing. Since we are adding one more alias to TAG's
1243*404b540aSrobert may-aliases set, the total number of virtual operands due
1244*404b540aSrobert to aliasing will be increased by the number of references
1245*404b540aSrobert made to VAR and TAG (every reference to TAG will also
1246*404b540aSrobert count as a reference to VAR). */
1247*404b540aSrobert ai->total_alias_vops += (num_var_refs + num_tag_refs);
1248*404b540aSrobert p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1249*404b540aSrobert
1250*404b540aSrobert
1251*404b540aSrobert }
1252*404b540aSrobert }
1253*404b540aSrobert }
1254*404b540aSrobert
1255*404b540aSrobert /* Since this analysis is based exclusively on symbols, it fails to
1256*404b540aSrobert handle cases where two pointers P and Q have different memory
1257*404b540aSrobert tags with conflicting alias set numbers but no aliased symbols in
1258*404b540aSrobert common.
1259*404b540aSrobert
1260*404b540aSrobert For example, suppose that we have two memory tags SMT.1 and SMT.2
1261*404b540aSrobert such that
1262*404b540aSrobert
1263*404b540aSrobert may-aliases (SMT.1) = { a }
1264*404b540aSrobert may-aliases (SMT.2) = { b }
1265*404b540aSrobert
1266*404b540aSrobert and the alias set number of SMT.1 conflicts with that of SMT.2.
1267*404b540aSrobert Since they don't have symbols in common, loads and stores from
1268*404b540aSrobert SMT.1 and SMT.2 will seem independent of each other, which will
1269*404b540aSrobert lead to the optimizers making invalid transformations (see
1270*404b540aSrobert testsuite/gcc.c-torture/execute/pr15262-[12].c).
1271*404b540aSrobert
1272*404b540aSrobert To avoid this problem, we do a final traversal of AI->POINTERS
1273*404b540aSrobert looking for pairs of pointers that have no aliased symbols in
1274*404b540aSrobert common and yet have conflicting alias set numbers. */
1275*404b540aSrobert for (i = 0; i < ai->num_pointers; i++)
1276*404b540aSrobert {
1277*404b540aSrobert size_t j;
1278*404b540aSrobert struct alias_map_d *p_map1 = ai->pointers[i];
1279*404b540aSrobert tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
1280*404b540aSrobert bitmap may_aliases1 = p_map1->may_aliases;
1281*404b540aSrobert
1282*404b540aSrobert if (PTR_IS_REF_ALL (p_map1->var))
1283*404b540aSrobert continue;
1284*404b540aSrobert
1285*404b540aSrobert for (j = i + 1; j < ai->num_pointers; j++)
1286*404b540aSrobert {
1287*404b540aSrobert struct alias_map_d *p_map2 = ai->pointers[j];
1288*404b540aSrobert tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
1289*404b540aSrobert bitmap may_aliases2 = p_map2->may_aliases;
1290*404b540aSrobert
1291*404b540aSrobert if (PTR_IS_REF_ALL (p_map2->var))
1292*404b540aSrobert continue;
1293*404b540aSrobert
1294*404b540aSrobert /* If the pointers may not point to each other, do nothing. */
1295*404b540aSrobert if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1296*404b540aSrobert continue;
1297*404b540aSrobert
1298*404b540aSrobert /* The two pointers may alias each other. If they already have
1299*404b540aSrobert symbols in common, do nothing. */
1300*404b540aSrobert if (bitmap_intersect_p (may_aliases1, may_aliases2))
1301*404b540aSrobert continue;
1302*404b540aSrobert
1303*404b540aSrobert if (!bitmap_empty_p (may_aliases2))
1304*404b540aSrobert {
1305*404b540aSrobert unsigned int k;
1306*404b540aSrobert bitmap_iterator bi;
1307*404b540aSrobert
1308*404b540aSrobert /* Add all the aliases for TAG2 into TAG1's alias set.
1309*404b540aSrobert FIXME, update grouping heuristic counters. */
1310*404b540aSrobert EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1311*404b540aSrobert add_may_alias (tag1, referenced_var (k));
1312*404b540aSrobert bitmap_ior_into (may_aliases1, may_aliases2);
1313*404b540aSrobert }
1314*404b540aSrobert else
1315*404b540aSrobert {
1316*404b540aSrobert /* Since TAG2 does not have any aliases of its own, add
1317*404b540aSrobert TAG2 itself to the alias set of TAG1. */
1318*404b540aSrobert add_may_alias (tag1, tag2);
1319*404b540aSrobert bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1320*404b540aSrobert }
1321*404b540aSrobert }
1322*404b540aSrobert }
1323*404b540aSrobert
1324*404b540aSrobert if (dump_file)
1325*404b540aSrobert fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1326*404b540aSrobert get_name (current_function_decl),
1327*404b540aSrobert ai->total_alias_vops);
1328*404b540aSrobert }
1329*404b540aSrobert
1330*404b540aSrobert
1331*404b540aSrobert /* Finalize may-alias information for ref-all pointers. Traverse all
1332*404b540aSrobert the addressable variables found in setup_pointers_and_addressables.
1333*404b540aSrobert
1334*404b540aSrobert If flow-sensitive alias analysis has attached a name memory tag to
1335*404b540aSrobert a ref-all pointer, we will use it for the dereferences because that
1336*404b540aSrobert will have more precise aliasing information. But if there is no
1337*404b540aSrobert name tag, we will use a special symbol tag that aliases all the
1338*404b540aSrobert call-clobbered addressable variables. */
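/* Illustrative sketch (hand-written; q_2 is an invented SSA name): if
   'q' is a ref-all pointer and flow-sensitive analysis could not give
   q_2 a name tag, then loads and stores through q_2 are modeled with
   the single AI->REF_ALL_SYMBOL_MEM_TAG built below, which is made to
   alias every call-clobbered addressable variable (or .GLOBAL_VAR when
   one exists).  */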
1339*404b540aSrobert
1340*404b540aSrobert static void
1341*404b540aSrobert finalize_ref_all_pointers (struct alias_info *ai)
1342*404b540aSrobert {
1343*404b540aSrobert size_t i;
1344*404b540aSrobert
1345*404b540aSrobert if (global_var)
1346*404b540aSrobert add_may_alias (ai->ref_all_symbol_mem_tag, global_var);
1347*404b540aSrobert else
1348*404b540aSrobert {
1349*404b540aSrobert /* First add the real call-clobbered variables. */
1350*404b540aSrobert for (i = 0; i < ai->num_addressable_vars; i++)
1351*404b540aSrobert {
1352*404b540aSrobert tree var = ai->addressable_vars[i]->var;
1353*404b540aSrobert if (is_call_clobbered (var))
1354*404b540aSrobert add_may_alias (ai->ref_all_symbol_mem_tag, var);
1355*404b540aSrobert }
1356*404b540aSrobert
1357*404b540aSrobert /* Then add the call-clobbered pointer memory tags. See
1358*404b540aSrobert compute_flow_insensitive_aliasing for the rationale. */
1359*404b540aSrobert for (i = 0; i < ai->num_pointers; i++)
1360*404b540aSrobert {
1361*404b540aSrobert tree ptr = ai->pointers[i]->var, tag;
1362*404b540aSrobert if (PTR_IS_REF_ALL (ptr))
1363*404b540aSrobert continue;
1364*404b540aSrobert tag = var_ann (ptr)->symbol_mem_tag;
1365*404b540aSrobert if (is_call_clobbered (tag))
1366*404b540aSrobert add_may_alias (ai->ref_all_symbol_mem_tag, tag);
1367*404b540aSrobert }
1368*404b540aSrobert }
1369*404b540aSrobert }
1370*404b540aSrobert
1371*404b540aSrobert
1372*404b540aSrobert /* Comparison function for qsort used in group_aliases. */
1373*404b540aSrobert
1374*404b540aSrobert static int
1375*404b540aSrobert total_alias_vops_cmp (const void *p, const void *q)
1376*404b540aSrobert {
1377*404b540aSrobert const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1378*404b540aSrobert const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1379*404b540aSrobert long n1 = (*p1)->total_alias_vops;
1380*404b540aSrobert long n2 = (*p2)->total_alias_vops;
1381*404b540aSrobert
1382*404b540aSrobert /* We want to sort in descending order. */
1383*404b540aSrobert return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1384*404b540aSrobert }
1385*404b540aSrobert
1386*404b540aSrobert /* Group all the aliases for TAG to make TAG represent all the
1387*404b540aSrobert variables in its alias set. Update the total number
1388*404b540aSrobert of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1389*404b540aSrobert function will make TAG be the unique alias tag for all the
1390*404b540aSrobert variables in its may-aliases. So, given:
1391*404b540aSrobert
1392*404b540aSrobert may-aliases(TAG) = { V1, V2, V3 }
1393*404b540aSrobert
1394*404b540aSrobert This function will group the variables into:
1395*404b540aSrobert
1396*404b540aSrobert may-aliases(V1) = { TAG }
1397*404b540aSrobert may-aliases(V2) = { TAG }
1398*404b540aSrobert may-aliases(V3) = { TAG } */
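/* A hand-worked example of the accounting below (the counts are
   invented): if TAG has 10 references and aliases { V1, V2, V3 } with
   4 references each, compute_flow_insensitive_aliasing charged
   3 * (10 + 4) = 42 alias vops for TAG.  Grouping subtracts the 10 TAG
   references once per grouped variable (-30) and adds them back once
   (+10), leaving 42 - 30 + 10 = 22.  */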
1399*404b540aSrobert
1400*404b540aSrobert static void
1401*404b540aSrobert group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1402*404b540aSrobert {
1403*404b540aSrobert unsigned int i;
1404*404b540aSrobert var_ann_t tag_ann = var_ann (tag);
1405*404b540aSrobert size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1406*404b540aSrobert bitmap_iterator bi;
1407*404b540aSrobert
1408*404b540aSrobert EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1409*404b540aSrobert {
1410*404b540aSrobert tree var = referenced_var (i);
1411*404b540aSrobert var_ann_t ann = var_ann (var);
1412*404b540aSrobert
1413*404b540aSrobert /* Make TAG the unique alias of VAR. */
1414*404b540aSrobert ann->is_aliased = 0;
1415*404b540aSrobert ann->may_aliases = NULL;
1416*404b540aSrobert
1417*404b540aSrobert /* Note that VAR and TAG may be the same if the function has no
1418*404b540aSrobert addressable variables (see the discussion at the end of
1419*404b540aSrobert setup_pointers_and_addressables). */
1420*404b540aSrobert if (var != tag)
1421*404b540aSrobert add_may_alias (var, tag);
1422*404b540aSrobert
1423*404b540aSrobert /* Reduce total number of virtual operands contributed
1424*404b540aSrobert by TAG on behalf of VAR. Notice that the references to VAR
1425*404b540aSrobert itself won't be removed. We will merely replace them with
1426*404b540aSrobert references to TAG. */
1427*404b540aSrobert ai->total_alias_vops -= num_tag_refs;
1428*404b540aSrobert }
1429*404b540aSrobert
1430*404b540aSrobert /* We have reduced the number of virtual operands that TAG makes on
1431*404b540aSrobert behalf of all the variables formerly aliased with it. However,
1432*404b540aSrobert we have also "removed" all the virtual operands for TAG itself,
1433*404b540aSrobert so we add them back. */
1434*404b540aSrobert ai->total_alias_vops += num_tag_refs;
1435*404b540aSrobert
1436*404b540aSrobert /* TAG no longer has any aliases. */
1437*404b540aSrobert tag_ann->may_aliases = NULL;
1438*404b540aSrobert }
1439*404b540aSrobert
1440*404b540aSrobert
1441*404b540aSrobert /* Group may-aliases sets to reduce the number of virtual operands due
1442*404b540aSrobert to aliasing.
1443*404b540aSrobert
1444*404b540aSrobert 1- Sort the list of pointers in decreasing number of contributed
1445*404b540aSrobert virtual operands.
1446*404b540aSrobert
1447*404b540aSrobert 2- Take the first entry in AI->POINTERS and reverse the roles of
1448*404b540aSrobert the memory tag and its aliases. Usually, whenever an aliased
1449*404b540aSrobert variable Vi is found to alias with a memory tag T, we add Vi
1450*404b540aSrobert to the may-aliases set for T, meaning that after alias
1451*404b540aSrobert analysis we will have:
1452*404b540aSrobert
1453*404b540aSrobert may-aliases(T) = { V1, V2, V3, ..., Vn }
1454*404b540aSrobert
1455*404b540aSrobert This means that every statement that references T will get 'n'
1456*404b540aSrobert virtual operands, one for each of the Vi variables. But, when alias
1457*404b540aSrobert grouping is enabled, we make T an alias tag and add it to the
1458*404b540aSrobert alias set of all the Vi variables:
1459*404b540aSrobert
1460*404b540aSrobert may-aliases(V1) = { T }
1461*404b540aSrobert may-aliases(V2) = { T }
1462*404b540aSrobert ...
1463*404b540aSrobert may-aliases(Vn) = { T }
1464*404b540aSrobert
1465*404b540aSrobert This has two effects: (a) statements referencing T will only get
1466*404b540aSrobert a single virtual operand, and, (b) all the variables Vi will now
1467*404b540aSrobert appear to alias each other. So, we lose alias precision to
1468*404b540aSrobert improve compile time. But, in theory, a program with such a high
1469*404b540aSrobert level of aliasing should not be very optimizable in the first
1470*404b540aSrobert place.
1471*404b540aSrobert
1472*404b540aSrobert 3- Since variables may be in the alias set of more than one
1473*404b540aSrobert memory tag, the grouping done in step (2) needs to be extended
1474*404b540aSrobert to all the memory tags that have a non-empty intersection with
1475*404b540aSrobert the may-aliases set of tag T. For instance, if we originally
1476*404b540aSrobert had these may-aliases sets:
1477*404b540aSrobert
1478*404b540aSrobert may-aliases(T) = { V1, V2, V3 }
1479*404b540aSrobert may-aliases(R) = { V2, V4 }
1480*404b540aSrobert
1481*404b540aSrobert In step (2) we would have reversed the aliases for T as:
1482*404b540aSrobert
1483*404b540aSrobert may-aliases(V1) = { T }
1484*404b540aSrobert may-aliases(V2) = { T }
1485*404b540aSrobert may-aliases(V3) = { T }
1486*404b540aSrobert
1487*404b540aSrobert But note that now V2 is no longer aliased with R. We could
1488*404b540aSrobert add R to may-aliases(V2), but we are in the process of
1489*404b540aSrobert grouping aliases to reduce virtual operands so what we do is
1490*404b540aSrobert add V4 to the grouping to obtain:
1491*404b540aSrobert
1492*404b540aSrobert may-aliases(V1) = { T }
1493*404b540aSrobert may-aliases(V2) = { T }
1494*404b540aSrobert may-aliases(V3) = { T }
1495*404b540aSrobert may-aliases(V4) = { T }
1496*404b540aSrobert
1497*404b540aSrobert 4- If the total number of virtual operands due to aliasing is
1498*404b540aSrobert still above the threshold set by max-alias-vops, go back to (2). */
1499*404b540aSrobert
1500*404b540aSrobert static void
1501*404b540aSrobert group_aliases (struct alias_info *ai)
1502*404b540aSrobert {
1503*404b540aSrobert size_t i;
1504*404b540aSrobert tree ptr;
1505*404b540aSrobert
1506*404b540aSrobert /* Sort the POINTERS array in descending order of contributed
1507*404b540aSrobert virtual operands. */
1508*404b540aSrobert qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
1509*404b540aSrobert total_alias_vops_cmp);
1510*404b540aSrobert
1511*404b540aSrobert /* For every pointer in AI->POINTERS, reverse the roles of its tag
1512*404b540aSrobert and the tag's may-aliases set. */
1513*404b540aSrobert for (i = 0; i < ai->num_pointers; i++)
1514*404b540aSrobert {
1515*404b540aSrobert size_t j;
1516*404b540aSrobert tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
1517*404b540aSrobert bitmap tag1_aliases = ai->pointers[i]->may_aliases;
1518*404b540aSrobert
1519*404b540aSrobert /* Skip tags that have been grouped already. */
1520*404b540aSrobert if (ai->pointers[i]->grouped_p)
1521*404b540aSrobert continue;
1522*404b540aSrobert
1523*404b540aSrobert /* See if TAG1 had any aliases in common with other symbol tags.
1524*404b540aSrobert If we find a TAG2 with common aliases with TAG1, add TAG2's
1525*404b540aSrobert aliases into TAG1. */
1526*404b540aSrobert for (j = i + 1; j < ai->num_pointers; j++)
1527*404b540aSrobert {
1528*404b540aSrobert bitmap tag2_aliases = ai->pointers[j]->may_aliases;
1529*404b540aSrobert
1530*404b540aSrobert if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
1531*404b540aSrobert {
1532*404b540aSrobert tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;
1533*404b540aSrobert
1534*404b540aSrobert bitmap_ior_into (tag1_aliases, tag2_aliases);
1535*404b540aSrobert
1536*404b540aSrobert /* TAG2 does not need its aliases anymore. */
1537*404b540aSrobert bitmap_clear (tag2_aliases);
1538*404b540aSrobert var_ann (tag2)->may_aliases = NULL;
1539*404b540aSrobert
1540*404b540aSrobert /* TAG1 is the unique alias of TAG2. */
1541*404b540aSrobert add_may_alias (tag2, tag1);
1542*404b540aSrobert
1543*404b540aSrobert ai->pointers[j]->grouped_p = true;
1544*404b540aSrobert }
1545*404b540aSrobert }
1546*404b540aSrobert
1547*404b540aSrobert /* Now group all the aliases we collected into TAG1. */
1548*404b540aSrobert group_aliases_into (tag1, tag1_aliases, ai);
1549*404b540aSrobert
1550*404b540aSrobert /* If we've reduced total number of virtual operands below the
1551*404b540aSrobert threshold, stop. */
1552*404b540aSrobert if (ai->total_alias_vops < MAX_ALIASED_VOPS)
1553*404b540aSrobert break;
1554*404b540aSrobert }
1555*404b540aSrobert
1556*404b540aSrobert /* Finally, all the variables that have been grouped cannot be in
1557*404b540aSrobert the may-alias set of name memory tags. Suppose that we have
1558*404b540aSrobert grouped the aliases in this code so that may-aliases(a) = { SMT.20 }
1559*404b540aSrobert
1560*404b540aSrobert p_5 = &a;
1561*404b540aSrobert ...
1562*404b540aSrobert # a_9 = V_MAY_DEF <a_8>
1563*404b540aSrobert p_5->field = 0
1564*404b540aSrobert ... Several modifications to SMT.20 ...
1565*404b540aSrobert # VUSE <a_9>
1566*404b540aSrobert x_30 = p_5->field
1567*404b540aSrobert
1568*404b540aSrobert Since p_5 points to 'a', the optimizers will try to propagate 0
1569*404b540aSrobert into p_5->field, but that is wrong because there have been
1570*404b540aSrobert modifications to 'SMT.20' in between. To prevent this we have to
1571*404b540aSrobert replace 'a' with 'SMT.20' in the name tag of p_5. */
1572*404b540aSrobert for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1573*404b540aSrobert {
1574*404b540aSrobert size_t j;
1575*404b540aSrobert tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
1576*404b540aSrobert VEC(tree,gc) *aliases;
1577*404b540aSrobert tree alias;
1578*404b540aSrobert
1579*404b540aSrobert if (name_tag == NULL_TREE)
1580*404b540aSrobert continue;
1581*404b540aSrobert
1582*404b540aSrobert aliases = var_ann (name_tag)->may_aliases;
1583*404b540aSrobert for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
1584*404b540aSrobert {
1585*404b540aSrobert var_ann_t ann = var_ann (alias);
1586*404b540aSrobert
1587*404b540aSrobert if ((!MTAG_P (alias)
1588*404b540aSrobert || TREE_CODE (alias) == STRUCT_FIELD_TAG)
1589*404b540aSrobert && ann->may_aliases)
1590*404b540aSrobert {
1591*404b540aSrobert tree new_alias;
1592*404b540aSrobert
1593*404b540aSrobert gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
1594*404b540aSrobert
1595*404b540aSrobert new_alias = VEC_index (tree, ann->may_aliases, 0);
1596*404b540aSrobert replace_may_alias (name_tag, j, new_alias);
1597*404b540aSrobert }
1598*404b540aSrobert }
1599*404b540aSrobert }
1600*404b540aSrobert
1601*404b540aSrobert if (dump_file)
1602*404b540aSrobert fprintf (dump_file,
1603*404b540aSrobert "%s: Total number of aliased vops after grouping: %ld%s\n",
1604*404b540aSrobert get_name (current_function_decl),
1605*404b540aSrobert ai->total_alias_vops,
1606*404b540aSrobert (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1607*404b540aSrobert }
1608*404b540aSrobert
1609*404b540aSrobert
1610*404b540aSrobert /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1611*404b540aSrobert
1612*404b540aSrobert static void
1613*404b540aSrobert create_alias_map_for (tree var, struct alias_info *ai)
1614*404b540aSrobert {
1615*404b540aSrobert struct alias_map_d *alias_map;
1616*404b540aSrobert alias_map = XCNEW (struct alias_map_d);
1617*404b540aSrobert alias_map->var = var;
1618*404b540aSrobert alias_map->set = get_alias_set (var);
1619*404b540aSrobert ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1620*404b540aSrobert }
1621*404b540aSrobert
1622*404b540aSrobert
1623*404b540aSrobert /* Create memory tags for all the dereferenced pointers and build the
1624*404b540aSrobert ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1625*404b540aSrobert sets. Based on the address escape and points-to information collected
1626*404b540aSrobert earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1627*404b540aSrobert variables whose address is not needed anymore. */
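/* Illustrative sketch (hand-written; not taken from any testcase):

     int a;
     int *p = &a;
     ...            all uses of '&a' later removed by propagation

   Once no '&a' survives, the loop below clears TREE_ADDRESSABLE on 'a'
   so it can be rewritten into SSA form like any other scalar, and 'p'
   is only added to AI->POINTERS if it is actually dereferenced.  */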
1628*404b540aSrobert
1629*404b540aSrobert static void
1630*404b540aSrobert setup_pointers_and_addressables (struct alias_info *ai)
1631*404b540aSrobert {
1632*404b540aSrobert size_t n_vars, num_addressable_vars, num_pointers;
1633*404b540aSrobert referenced_var_iterator rvi;
1634*404b540aSrobert tree var;
1635*404b540aSrobert VEC (tree, heap) *varvec = NULL;
1636*404b540aSrobert safe_referenced_var_iterator srvi;
1637*404b540aSrobert
1638*404b540aSrobert /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1639*404b540aSrobert num_addressable_vars = num_pointers = 0;
1640*404b540aSrobert
1641*404b540aSrobert FOR_EACH_REFERENCED_VAR (var, rvi)
1642*404b540aSrobert {
1643*404b540aSrobert if (may_be_aliased (var))
1644*404b540aSrobert num_addressable_vars++;
1645*404b540aSrobert
1646*404b540aSrobert if (POINTER_TYPE_P (TREE_TYPE (var)))
1647*404b540aSrobert {
1648*404b540aSrobert /* Since we don't keep track of volatile variables, assume that
1649*404b540aSrobert these pointers are used in indirect store operations. */
1650*404b540aSrobert if (TREE_THIS_VOLATILE (var))
1651*404b540aSrobert bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1652*404b540aSrobert
1653*404b540aSrobert num_pointers++;
1654*404b540aSrobert }
1655*404b540aSrobert }
1656*404b540aSrobert
1657*404b540aSrobert /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1658*404b540aSrobert always going to be slightly bigger than we actually need them
1659*404b540aSrobert because some TREE_ADDRESSABLE variables will be marked
1660*404b540aSrobert non-addressable below and only pointers with unique symbol tags are
1661*404b540aSrobert going to be added to POINTERS. */
1662*404b540aSrobert ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1663*404b540aSrobert ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1664*404b540aSrobert ai->num_addressable_vars = 0;
1665*404b540aSrobert ai->num_pointers = 0;
1666*404b540aSrobert
1667*404b540aSrobert /* Since we will be creating symbol memory tags within this loop,
1668*404b540aSrobert cache the value of NUM_REFERENCED_VARS to avoid processing the
1669*404b540aSrobert additional tags unnecessarily. */
1670*404b540aSrobert n_vars = num_referenced_vars;
1671*404b540aSrobert
1672*404b540aSrobert FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1673*404b540aSrobert {
1674*404b540aSrobert var_ann_t v_ann = var_ann (var);
1675*404b540aSrobert subvar_t svars;
1676*404b540aSrobert
1677*404b540aSrobert /* Name memory tags already have flow-sensitive aliasing
1678*404b540aSrobert information, so they need not be processed by
1679*404b540aSrobert compute_flow_insensitive_aliasing. Similarly, symbol memory
1680*404b540aSrobert tags are already accounted for when we process their
1681*404b540aSrobert associated pointer.
1682*404b540aSrobert
1683*404b540aSrobert Structure fields, on the other hand, have to have some of this
1684*404b540aSrobert information processed for them, but it's pointless to mark them
1685*404b540aSrobert non-addressable (since they are fake variables anyway). */
1686*404b540aSrobert if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1687*404b540aSrobert continue;
1688*404b540aSrobert
1689*404b540aSrobert /* Remove the ADDRESSABLE flag from every addressable variable whose
1690*404b540aSrobert address is not needed anymore. This is caused by the propagation
1691*404b540aSrobert of ADDR_EXPR constants into INDIRECT_REF expressions and the
1692*404b540aSrobert removal of dead pointer assignments done by the early scalar
1693*404b540aSrobert cleanup passes. */
1694*404b540aSrobert if (TREE_ADDRESSABLE (var))
1695*404b540aSrobert {
1696*404b540aSrobert if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
1697*404b540aSrobert && TREE_CODE (var) != RESULT_DECL
1698*404b540aSrobert && !is_global_var (var))
1699*404b540aSrobert {
1700*404b540aSrobert bool okay_to_mark = true;
1701*404b540aSrobert
1702*404b540aSrobert /* Since VAR is now a regular GIMPLE register, we will need
1703*404b540aSrobert to rename VAR into SSA afterwards. */
1704*404b540aSrobert mark_sym_for_renaming (var);
1705*404b540aSrobert
1706*404b540aSrobert /* If VAR can have sub-variables, and any of its
1707*404b540aSrobert sub-variables has its address taken, then we cannot
1708*404b540aSrobert remove the addressable flag from VAR. */
1709*404b540aSrobert if (var_can_have_subvars (var)
1710*404b540aSrobert && (svars = get_subvars_for_var (var)))
1711*404b540aSrobert {
1712*404b540aSrobert subvar_t sv;
1713*404b540aSrobert
1714*404b540aSrobert for (sv = svars; sv; sv = sv->next)
1715*404b540aSrobert {
1716*404b540aSrobert if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
1717*404b540aSrobert okay_to_mark = false;
1718*404b540aSrobert mark_sym_for_renaming (sv->var);
1719*404b540aSrobert }
1720*404b540aSrobert }
1721*404b540aSrobert
1722*404b540aSrobert /* The address of VAR is not needed, remove the
1723*404b540aSrobert addressable bit, so that it can be optimized as a
1724*404b540aSrobert regular variable. */
1725*404b540aSrobert if (okay_to_mark)
1726*404b540aSrobert mark_non_addressable (var);
1727*404b540aSrobert }
1728*404b540aSrobert }
1729*404b540aSrobert
1730*404b540aSrobert /* Global variables and addressable locals may be aliased. Create an
1731*404b540aSrobert entry in ADDRESSABLE_VARS for VAR. */
1732*404b540aSrobert if (may_be_aliased (var)
1733*404b540aSrobert && (!var_can_have_subvars (var)
1734*404b540aSrobert || get_subvars_for_var (var) == NULL))
1735*404b540aSrobert {
1736*404b540aSrobert create_alias_map_for (var, ai);
1737*404b540aSrobert mark_sym_for_renaming (var);
1738*404b540aSrobert }
1739*404b540aSrobert
1740*404b540aSrobert /* Add pointer variables that have been dereferenced to the POINTERS
1741*404b540aSrobert array and create a symbol memory tag for them. */
1742*404b540aSrobert if (POINTER_TYPE_P (TREE_TYPE (var)))
1743*404b540aSrobert {
1744*404b540aSrobert if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1745*404b540aSrobert || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1746*404b540aSrobert {
1747*404b540aSrobert tree tag;
1748*404b540aSrobert var_ann_t t_ann;
1749*404b540aSrobert
1750*404b540aSrobert /* If pointer VAR still doesn't have a memory tag
1751*404b540aSrobert associated with it, create it now or re-use an
1752*404b540aSrobert existing one. */
1753*404b540aSrobert tag = get_tmt_for (var, ai);
1754*404b540aSrobert t_ann = var_ann (tag);
1755*404b540aSrobert
1756*404b540aSrobert /* The symbol tag will need to be renamed into SSA
1757*404b540aSrobert afterwards. Note that we cannot do this inside
1758*404b540aSrobert get_tmt_for because aliasing may run multiple times
1759*404b540aSrobert and we only create symbol tags the first time. */
1760*404b540aSrobert mark_sym_for_renaming (tag);
1761*404b540aSrobert
1762*404b540aSrobert /* Similarly, if pointer VAR used to have another type
1763*404b540aSrobert tag, we will need to process it in the renamer to
1764*404b540aSrobert remove the stale virtual operands. */
1765*404b540aSrobert if (v_ann->symbol_mem_tag)
1766*404b540aSrobert mark_sym_for_renaming (v_ann->symbol_mem_tag);
1767*404b540aSrobert
1768*404b540aSrobert /* Associate the tag with pointer VAR. */
1769*404b540aSrobert v_ann->symbol_mem_tag = tag;
1770*404b540aSrobert
1771*404b540aSrobert /* If pointer VAR has been used in a store operation,
1772*404b540aSrobert then its memory tag must be marked as written-to. */
1773*404b540aSrobert if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1774*404b540aSrobert bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1775*404b540aSrobert
1776*404b540aSrobert /* All the dereferences of pointer VAR count as
1777*404b540aSrobert references of TAG. Since TAG can be associated with
1778*404b540aSrobert several pointers, add the dereferences of VAR to the
1779*404b540aSrobert TAG. */
1780*404b540aSrobert NUM_REFERENCES_SET (t_ann,
1781*404b540aSrobert NUM_REFERENCES (t_ann)
1782*404b540aSrobert + NUM_REFERENCES (v_ann));
1783*404b540aSrobert }
1784*404b540aSrobert else
1785*404b540aSrobert {
1786*404b540aSrobert /* The pointer has not been dereferenced. If it had a
1787*404b540aSrobert symbol memory tag, remove it and mark the old tag for
1788*404b540aSrobert renaming to remove it out of the IL. */
1789*404b540aSrobert var_ann_t ann = var_ann (var);
1790*404b540aSrobert tree tag = ann->symbol_mem_tag;
1791*404b540aSrobert if (tag)
1792*404b540aSrobert {
1793*404b540aSrobert mark_sym_for_renaming (tag);
1794*404b540aSrobert ann->symbol_mem_tag = NULL_TREE;
1795*404b540aSrobert }
1796*404b540aSrobert }
1797*404b540aSrobert }
1798*404b540aSrobert }
1799*404b540aSrobert VEC_free (tree, heap, varvec);
1800*404b540aSrobert }
1801*404b540aSrobert
1802*404b540aSrobert
1803*404b540aSrobert /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1804*404b540aSrobert every call site, we need to emit V_MAY_DEF expressions to represent the
1805*404b540aSrobert clobbering effects of the call for variables whose address escapes the
1806*404b540aSrobert current function.
1807*404b540aSrobert
1808*404b540aSrobert One approach is to group all call-clobbered variables into a single
1809*404b540aSrobert representative that is used as an alias of every call-clobbered variable
1810*404b540aSrobert (.GLOBAL_VAR). This works well, but it ties the optimizer's hands because
1811*404b540aSrobert a reference to any call-clobbered variable becomes a reference to .GLOBAL_VAR.
1812*404b540aSrobert
1813*404b540aSrobert The second approach is to emit a clobbering V_MAY_DEF for every
1814*404b540aSrobert call-clobbered variable at call sites. This is the preferred way in terms
1815*404b540aSrobert of optimization opportunities but it may create too many V_MAY_DEF operands
1816*404b540aSrobert if there are many call clobbered variables and function calls in the
1817*404b540aSrobert function.
1818*404b540aSrobert
1819*404b540aSrobert To decide whether or not to use .GLOBAL_VAR we multiply the number of
1820*404b540aSrobert function calls found by the number of call-clobbered variables. If that
1821*404b540aSrobert product is beyond a certain threshold, as determined by the parameterized
1822*404b540aSrobert values shown below, we use .GLOBAL_VAR.
1823*404b540aSrobert
1824*404b540aSrobert FIXME. This heuristic should be improved. One idea is to use several
1825*404b540aSrobert .GLOBAL_VARs of different types instead of a single one. The thresholds
1826*404b540aSrobert have been derived from a typical bootstrap cycle, including all target
1827*404b540aSrobert libraries. Compile times were found to increase by ~1% compared to using
1828*404b540aSrobert .GLOBAL_VAR. */
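/* A hand-worked instance of the heuristic (numbers invented): with 60
   call sites and 100 call-clobbered variables, emitting individual
   clobbers would cost on the order of 60 * 100 = 6000 V_MAY_DEF
   operands; if that product reaches GLOBAL_VAR_THRESHOLD, a single
   .GLOBAL_VAR is used instead, trading alias precision for compile
   time.  */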
1829*404b540aSrobert
1830*404b540aSrobert static void
1831*404b540aSrobert maybe_create_global_var (struct alias_info *ai)
1832*404b540aSrobert {
1833*404b540aSrobert unsigned i, n_clobbered;
1834*404b540aSrobert bitmap_iterator bi;
1835*404b540aSrobert
1836*404b540aSrobert /* No need to create it, if we have one already. */
1837*404b540aSrobert if (global_var == NULL_TREE)
1838*404b540aSrobert {
1839*404b540aSrobert /* Count all the call-clobbered variables. */
1840*404b540aSrobert n_clobbered = 0;
1841*404b540aSrobert EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1842*404b540aSrobert {
1843*404b540aSrobert n_clobbered++;
1844*404b540aSrobert }
1845*404b540aSrobert
1846*404b540aSrobert /* If the number of virtual operands that would be needed to
1847*404b540aSrobert model all the call-clobbered variables is larger than
1848*404b540aSrobert GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
1849*404b540aSrobert
1850*404b540aSrobert Also create .GLOBAL_VAR if there are no call-clobbered
1851*404b540aSrobert variables and the program contains a mixture of pure/const
1852*404b540aSrobert and regular function calls. This is to avoid the problem
1853*404b540aSrobert described in PR 20115:
1854*404b540aSrobert
1855*404b540aSrobert int X;
1856*404b540aSrobert int func_pure (void) { return X; }
1857*404b540aSrobert int func_non_pure (int a) { X += a; }
1858*404b540aSrobert int foo ()
1859*404b540aSrobert {
1860*404b540aSrobert int a = func_pure ();
1861*404b540aSrobert func_non_pure (a);
1862*404b540aSrobert a = func_pure ();
1863*404b540aSrobert return a;
1864*404b540aSrobert }
1865*404b540aSrobert
1866*404b540aSrobert Since foo() has no call-clobbered variables, there is
1867*404b540aSrobert no relationship between the calls to func_pure and
1868*404b540aSrobert func_non_pure. Since func_pure has no side-effects, value
1869*404b540aSrobert numbering optimizations elide the second call to func_pure.
1870*404b540aSrobert So, if we have some pure/const and some regular calls in the
1871*404b540aSrobert program we create .GLOBAL_VAR to avoid missing these
1872*404b540aSrobert relations. */
1873*404b540aSrobert if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
1874*404b540aSrobert || (n_clobbered == 0
1875*404b540aSrobert && ai->num_calls_found > 0
1876*404b540aSrobert && ai->num_pure_const_calls_found > 0
1877*404b540aSrobert && ai->num_calls_found > ai->num_pure_const_calls_found))
1878*404b540aSrobert create_global_var ();
1879*404b540aSrobert }
1880*404b540aSrobert
1881*404b540aSrobert /* Mark all call-clobbered symbols for renaming. Since the initial
1882*404b540aSrobert rewrite into SSA ignored all call sites, we may need to rename
1883*404b540aSrobert .GLOBAL_VAR and the call-clobbered variables. */
1884*404b540aSrobert EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1885*404b540aSrobert {
1886*404b540aSrobert tree var = referenced_var (i);
1887*404b540aSrobert
1888*404b540aSrobert /* If the function has calls to clobbering functions and
1889*404b540aSrobert .GLOBAL_VAR has been created, make it an alias for all
1890*404b540aSrobert call-clobbered variables. */
1891*404b540aSrobert if (global_var && var != global_var)
1892*404b540aSrobert {
1893*404b540aSrobert add_may_alias (var, global_var);
1894*404b540aSrobert gcc_assert (!get_subvars_for_var (var));
1895*404b540aSrobert }
1896*404b540aSrobert
1897*404b540aSrobert mark_sym_for_renaming (var);
1898*404b540aSrobert }
1899*404b540aSrobert }
1900*404b540aSrobert
1901*404b540aSrobert
1902*404b540aSrobert /* Return TRUE if pointer PTR may point to variable VAR.
1903*404b540aSrobert
1904*404b540aSrobert MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
1905*404b540aSrobert This is needed because when checking for type conflicts we are
1906*404b540aSrobert interested in the alias set of the memory location pointed-to by
1907*404b540aSrobert PTR. The alias set of PTR itself is irrelevant.
1908*404b540aSrobert
1909*404b540aSrobert VAR_ALIAS_SET is the alias set for VAR. */
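/* Illustrative sketch (hand-written):

     int *p;      dereferenced as an 'int'
     double d;

   Here MEM_ALIAS_SET is the alias set of 'int' and VAR_ALIAS_SET that
   of 'double'; the two sets do not conflict under TBAA, so the
   alias_sets_conflict_p check below concludes that *p cannot be 'd'.  */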
1910*404b540aSrobert
1911*404b540aSrobert static bool
1912*404b540aSrobert may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1913*404b540aSrobert tree var, HOST_WIDE_INT var_alias_set,
1914*404b540aSrobert bool alias_set_only)
1915*404b540aSrobert {
1916*404b540aSrobert tree mem;
1917*404b540aSrobert
1918*404b540aSrobert alias_stats.alias_queries++;
1919*404b540aSrobert alias_stats.simple_queries++;
1920*404b540aSrobert
1921*404b540aSrobert /* By convention, a variable cannot alias itself. */
1922*404b540aSrobert mem = var_ann (ptr)->symbol_mem_tag;
1923*404b540aSrobert if (mem == var)
1924*404b540aSrobert {
1925*404b540aSrobert alias_stats.alias_noalias++;
1926*404b540aSrobert alias_stats.simple_resolved++;
1927*404b540aSrobert return false;
1928*404b540aSrobert }
1929*404b540aSrobert
1930*404b540aSrobert /* If flag_argument_noalias > 2, pointer arguments are assumed
1931*404b540aSrobert not to point to anything at all. */
1932*404b540aSrobert if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
1933*404b540aSrobert {
1934*404b540aSrobert alias_stats.alias_noalias++;
1935*404b540aSrobert alias_stats.simple_resolved++;
1936*404b540aSrobert return false;
1937*404b540aSrobert }
1938*404b540aSrobert
1939*404b540aSrobert /* If flag_argument_noalias > 1, pointer arguments may
1940*404b540aSrobert not point to global variables. */
1941*404b540aSrobert if (flag_argument_noalias > 1 && is_global_var (var)
1942*404b540aSrobert && TREE_CODE (ptr) == PARM_DECL)
1943*404b540aSrobert {
1944*404b540aSrobert alias_stats.alias_noalias++;
1945*404b540aSrobert alias_stats.simple_resolved++;
1946*404b540aSrobert return false;
1947*404b540aSrobert }
1948*404b540aSrobert
1949*404b540aSrobert /* If either MEM or VAR is a read-only global and the other one
1950*404b540aSrobert isn't, then PTR cannot point to VAR. */
1951*404b540aSrobert if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1952*404b540aSrobert || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1953*404b540aSrobert {
1954*404b540aSrobert alias_stats.alias_noalias++;
1955*404b540aSrobert alias_stats.simple_resolved++;
1956*404b540aSrobert return false;
1957*404b540aSrobert }
1958*404b540aSrobert
1959*404b540aSrobert gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
1960*404b540aSrobert
1961*404b540aSrobert alias_stats.tbaa_queries++;
1962*404b540aSrobert
1963*404b540aSrobert /* If the alias sets don't conflict then MEM cannot alias VAR. */
1964*404b540aSrobert if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1965*404b540aSrobert {
1966*404b540aSrobert alias_stats.alias_noalias++;
1967*404b540aSrobert alias_stats.tbaa_resolved++;
1968*404b540aSrobert return false;
1969*404b540aSrobert }
1970*404b540aSrobert
1971*404b540aSrobert /* If var is a record or union type, ptr cannot point into var
1972*404b540aSrobert unless there is some explicit address operation in the
1973*404b540aSrobert program that can reference a field of the ptr's dereferenced
1974*404b540aSrobert type. This also assumes that the types of both var and ptr are
1975*404b540aSrobert contained within the compilation unit, and that there is no fancy
1976*404b540aSrobert addressing arithmetic associated with any of the types
1977*404b540aSrobert involved. */
1978*404b540aSrobert
1979*404b540aSrobert if ((mem_alias_set != 0) && (var_alias_set != 0))
1980*404b540aSrobert {
1981*404b540aSrobert tree ptr_type = TREE_TYPE (ptr);
1982*404b540aSrobert tree var_type = TREE_TYPE (var);
1983*404b540aSrobert
1984*404b540aSrobert /* The star count is -1 if the type at the end of the pointer_to
1985*404b540aSrobert chain is not a record or union type. */
1986*404b540aSrobert if ((!alias_set_only) &&
1987*404b540aSrobert ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1988*404b540aSrobert {
1989*404b540aSrobert int ptr_star_count = 0;
1990*404b540aSrobert
1991*404b540aSrobert /* Ipa_type_escape_star_count_of_interesting_type is a little too
1992*404b540aSrobert restrictive for the pointer type; we need to allow pointers to
1993*404b540aSrobert primitive types as long as those types cannot be pointers
1994*404b540aSrobert to everything. */
1995*404b540aSrobert while (POINTER_TYPE_P (ptr_type))
1996*404b540aSrobert /* Strip the *'s off. */
1997*404b540aSrobert {
1998*404b540aSrobert ptr_type = TREE_TYPE (ptr_type);
1999*404b540aSrobert ptr_star_count++;
2000*404b540aSrobert }
2001*404b540aSrobert
2002*404b540aSrobert /* There does not appear to be a better test to see if the
2003*404b540aSrobert pointer type was one of the pointer to everything
2004*404b540aSrobert types. */
2005*404b540aSrobert
2006*404b540aSrobert if (ptr_star_count > 0)
2007*404b540aSrobert {
2008*404b540aSrobert alias_stats.structnoaddress_queries++;
2009*404b540aSrobert if (ipa_type_escape_field_does_not_clobber_p (var_type,
2010*404b540aSrobert TREE_TYPE (ptr)))
2011*404b540aSrobert {
2012*404b540aSrobert alias_stats.structnoaddress_resolved++;
2013*404b540aSrobert alias_stats.alias_noalias++;
2014*404b540aSrobert return false;
2015*404b540aSrobert }
2016*404b540aSrobert }
2017*404b540aSrobert else if (ptr_star_count == 0)
2018*404b540aSrobert {
2019*404b540aSrobert /* If ptr_type was not really a pointer type, it cannot
2020*404b540aSrobert alias. */
2021*404b540aSrobert alias_stats.structnoaddress_queries++;
2022*404b540aSrobert alias_stats.structnoaddress_resolved++;
2023*404b540aSrobert alias_stats.alias_noalias++;
2024*404b540aSrobert return false;
2025*404b540aSrobert }
2026*404b540aSrobert }
2027*404b540aSrobert }
2028*404b540aSrobert
2029*404b540aSrobert alias_stats.alias_mayalias++;
2030*404b540aSrobert return true;
2031*404b540aSrobert }
2032*404b540aSrobert
2033*404b540aSrobert
2034*404b540aSrobert /* Add ALIAS to the set of variables that may alias VAR. */
2035*404b540aSrobert
2036*404b540aSrobert static void
2037*404b540aSrobert add_may_alias (tree var, tree alias)
2038*404b540aSrobert {
2039*404b540aSrobert size_t i;
2040*404b540aSrobert var_ann_t v_ann = get_var_ann (var);
2041*404b540aSrobert var_ann_t a_ann = get_var_ann (alias);
2042*404b540aSrobert tree al;
2043*404b540aSrobert
2044*404b540aSrobert /* Don't allow self-referential aliases. */
2045*404b540aSrobert gcc_assert (var != alias);
2046*404b540aSrobert
2047*404b540aSrobert /* ALIAS must be addressable if it's being added to an alias set. */
2048*404b540aSrobert #if 1
2049*404b540aSrobert TREE_ADDRESSABLE (alias) = 1;
2050*404b540aSrobert #else
2051*404b540aSrobert gcc_assert (may_be_aliased (alias));
2052*404b540aSrobert #endif
2053*404b540aSrobert
2054*404b540aSrobert if (v_ann->may_aliases == NULL)
2055*404b540aSrobert v_ann->may_aliases = VEC_alloc (tree, gc, 2);
2056*404b540aSrobert
2057*404b540aSrobert /* Avoid adding duplicates. */
2058*404b540aSrobert for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
2059*404b540aSrobert if (alias == al)
2060*404b540aSrobert return;
2061*404b540aSrobert
2062*404b540aSrobert VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
2063*404b540aSrobert a_ann->is_aliased = 1;
2064*404b540aSrobert }
2065*404b540aSrobert
2066*404b540aSrobert
2067*404b540aSrobert /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
2068*404b540aSrobert
2069*404b540aSrobert static void
2070*404b540aSrobert replace_may_alias (tree var, size_t i, tree new_alias)
2071*404b540aSrobert {
2072*404b540aSrobert var_ann_t v_ann = var_ann (var);
2073*404b540aSrobert VEC_replace (tree, v_ann->may_aliases, i, new_alias);
2074*404b540aSrobert }
2075*404b540aSrobert
2076*404b540aSrobert
2077*404b540aSrobert /* Mark pointer PTR as pointing to an arbitrary memory location. */
2078*404b540aSrobert
2079*404b540aSrobert static void
2080*404b540aSrobert set_pt_anything (tree ptr)
2081*404b540aSrobert {
2082*404b540aSrobert struct ptr_info_def *pi = get_ptr_info (ptr);
2083*404b540aSrobert
2084*404b540aSrobert pi->pt_anything = 1;
2085*404b540aSrobert pi->pt_vars = NULL;
2086*404b540aSrobert
2087*404b540aSrobert /* The pointer used to have a name tag, but we now found it pointing
2088*404b540aSrobert to an arbitrary location. The name tag needs to be renamed and
2089*404b540aSrobert disassociated from PTR. */
2090*404b540aSrobert if (pi->name_mem_tag)
2091*404b540aSrobert {
2092*404b540aSrobert mark_sym_for_renaming (pi->name_mem_tag);
2093*404b540aSrobert pi->name_mem_tag = NULL_TREE;
2094*404b540aSrobert }
2095*404b540aSrobert }
2096*404b540aSrobert
2097*404b540aSrobert
2098*404b540aSrobert /* Determine whether STMT is an "escape" site from the current function.
2099*404b540aSrobert Escape sites are those statements which might expose the address of a
2100*404b540aSrobert variable outside the current function. STMT is an escape site iff:
2101*404b540aSrobert
2102*404b540aSrobert 1- STMT is a function call, or
2103*404b540aSrobert 2- STMT is an __asm__ expression, or
2104*404b540aSrobert 3- STMT is an assignment to a non-local variable, or
2105*404b540aSrobert 4- STMT is a return statement.
2106*404b540aSrobert
2107*404b540aSrobert Return the type of escape site found, if we found one, or NO_ESCAPE
2108*404b540aSrobert if none. */
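/* Illustrative sketches (hand-written; 'a' is a local whose address is
   taken):

     foo (&a);                        -> ESCAPE_TO_CALL
     __asm__ ("" : : "r" (&a));       -> ESCAPE_TO_ASM
     global_ptr = &a;                 -> ESCAPE_STORED_IN_GLOBAL
     return &a;                       -> ESCAPE_TO_RETURN  */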
2109*404b540aSrobert
2110*404b540aSrobert enum escape_type
2111*404b540aSrobert is_escape_site (tree stmt)
2112*404b540aSrobert {
2113*404b540aSrobert tree call = get_call_expr_in (stmt);
2114*404b540aSrobert if (call != NULL_TREE)
2115*404b540aSrobert {
2116*404b540aSrobert if (!TREE_SIDE_EFFECTS (call))
2117*404b540aSrobert return ESCAPE_TO_PURE_CONST;
2118*404b540aSrobert
2119*404b540aSrobert return ESCAPE_TO_CALL;
2120*404b540aSrobert }
2121*404b540aSrobert else if (TREE_CODE (stmt) == ASM_EXPR)
2122*404b540aSrobert return ESCAPE_TO_ASM;
2123*404b540aSrobert else if (TREE_CODE (stmt) == MODIFY_EXPR)
2124*404b540aSrobert {
2125*404b540aSrobert tree lhs = TREE_OPERAND (stmt, 0);
2126*404b540aSrobert
2127*404b540aSrobert /* Get to the base of _REF nodes. */
2128*404b540aSrobert if (TREE_CODE (lhs) != SSA_NAME)
2129*404b540aSrobert lhs = get_base_address (lhs);
2130*404b540aSrobert
2131*404b540aSrobert /* If we couldn't recognize the LHS of the assignment, assume that it
2132*404b540aSrobert is a non-local store. */
2133*404b540aSrobert if (lhs == NULL_TREE)
2134*404b540aSrobert return ESCAPE_UNKNOWN;
2135*404b540aSrobert
2136*404b540aSrobert if (TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
2137*404b540aSrobert || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
2138*404b540aSrobert || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2139*404b540aSrobert {
2140*404b540aSrobert tree from = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0));
2141*404b540aSrobert tree to = TREE_TYPE (TREE_OPERAND (stmt, 1));
2142*404b540aSrobert
2143*404b540aSrobert /* If the RHS is a conversion between a pointer and an integer, the
2144*404b540aSrobert pointer escapes since we can't track the integer. */
2145*404b540aSrobert if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
2146*404b540aSrobert return ESCAPE_BAD_CAST;
2147*404b540aSrobert
2148*404b540aSrobert /* Same if the RHS is a conversion between a regular pointer and a
2149*404b540aSrobert ref-all pointer since we can't track the SMT of the former. */
2150*404b540aSrobert if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
2151*404b540aSrobert && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
2152*404b540aSrobert return ESCAPE_BAD_CAST;
2153*404b540aSrobert }
2154*404b540aSrobert
2155*404b540aSrobert /* If the LHS is an SSA name, it can't possibly represent a non-local
2156*404b540aSrobert memory store. */
2157*404b540aSrobert if (TREE_CODE (lhs) == SSA_NAME)
2158*404b540aSrobert return NO_ESCAPE;
2159*404b540aSrobert
2160*404b540aSrobert /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2161*404b540aSrobert local variable, we cannot be sure whether it will escape, because we
2162*404b540aSrobert don't have information about objects not in SSA form. Need to
2163*404b540aSrobert implement something along the lines of
2164*404b540aSrobert
2165*404b540aSrobert J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2166*404b540aSrobert Midkiff, ``Escape analysis for java,'' in Proceedings of the
2167*404b540aSrobert Conference on Object-Oriented Programming Systems, Languages, and
2168*404b540aSrobert Applications (OOPSLA), pp. 1-19, 1999. */
2169*404b540aSrobert return ESCAPE_STORED_IN_GLOBAL;
2170*404b540aSrobert }
2171*404b540aSrobert else if (TREE_CODE (stmt) == RETURN_EXPR)
2172*404b540aSrobert return ESCAPE_TO_RETURN;
2173*404b540aSrobert
2174*404b540aSrobert return NO_ESCAPE;
2175*404b540aSrobert }
2176*404b540aSrobert
2177*404b540aSrobert /* Create a new memory tag of type TYPE.
2178*404b540aSrobert Does NOT push it into the current binding. */
2179*404b540aSrobert
2180*404b540aSrobert static tree
2181*404b540aSrobert create_tag_raw (enum tree_code code, tree type, const char *prefix)
2182*404b540aSrobert {
2183*404b540aSrobert tree tmp_var;
2184*404b540aSrobert tree new_type;
2185*404b540aSrobert
2186*404b540aSrobert /* Make the type of the variable writable. */
2187*404b540aSrobert new_type = build_type_variant (type, 0, 0);
2188*404b540aSrobert TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
2189*404b540aSrobert
2190*404b540aSrobert tmp_var = build_decl (code, create_tmp_var_name (prefix),
2191*404b540aSrobert type);
2192*404b540aSrobert /* Make the variable writable. */
2193*404b540aSrobert TREE_READONLY (tmp_var) = 0;
2194*404b540aSrobert
2195*404b540aSrobert /* It doesn't start out global. */
2196*404b540aSrobert MTAG_GLOBAL (tmp_var) = 0;
2197*404b540aSrobert TREE_STATIC (tmp_var) = 0;
2198*404b540aSrobert TREE_USED (tmp_var) = 1;
2199*404b540aSrobert
2200*404b540aSrobert return tmp_var;
2201*404b540aSrobert }
2202*404b540aSrobert
2203*404b540aSrobert /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2204*404b540aSrobert is considered to represent all the pointers whose pointed-to types are
2205*404b540aSrobert in the same alias set class. Otherwise, the tag represents a single
2206*404b540aSrobert SSA_NAME pointer variable. */
2207*404b540aSrobert
2208*404b540aSrobert static tree
2209*404b540aSrobert create_memory_tag (tree type, bool is_type_tag)
2210*404b540aSrobert {
2211*404b540aSrobert var_ann_t ann;
2212*404b540aSrobert tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2213*404b540aSrobert type, (is_type_tag) ? "SMT" : "NMT");
2214*404b540aSrobert
2215*404b540aSrobert /* By default, memory tags are local variables. Alias analysis will
2216*404b540aSrobert determine whether they should be considered globals. */
2217*404b540aSrobert DECL_CONTEXT (tag) = current_function_decl;
2218*404b540aSrobert
2219*404b540aSrobert /* Memory tags are by definition addressable. */
2220*404b540aSrobert TREE_ADDRESSABLE (tag) = 1;
2221*404b540aSrobert
2222*404b540aSrobert ann = get_var_ann (tag);
2223*404b540aSrobert ann->symbol_mem_tag = NULL_TREE;
2224*404b540aSrobert
2225*404b540aSrobert /* Add the tag to the symbol table. */
2226*404b540aSrobert add_referenced_var (tag);
2227*404b540aSrobert
2228*404b540aSrobert return tag;
2229*404b540aSrobert }
2230*404b540aSrobert
2231*404b540aSrobert
2232*404b540aSrobert /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2233*404b540aSrobert This is used if P_i has been found to point to a specific set of
2234*404b540aSrobert variables or to a non-aliased memory location like the address returned
2235*404b540aSrobert by malloc functions. */
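/* Illustrative sketch (hand-written; the SSA name is invented):

     p_3 = malloc (8);
     *p_3 = 1;

   If points-to analysis proves that p_3 only points to freshly
   allocated heap memory, its name tag need not alias any user
   variable, so stores through p_3 do not clobber them.  */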
2236*404b540aSrobert
2237*404b540aSrobert static tree
2238*404b540aSrobert get_nmt_for (tree ptr)
2239*404b540aSrobert {
2240*404b540aSrobert struct ptr_info_def *pi = get_ptr_info (ptr);
2241*404b540aSrobert tree tag = pi->name_mem_tag;
2242*404b540aSrobert
2243*404b540aSrobert if (tag == NULL_TREE)
2244*404b540aSrobert tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2245*404b540aSrobert return tag;
2246*404b540aSrobert }
2247*404b540aSrobert
2248*404b540aSrobert
2249*404b540aSrobert /* Return the symbol memory tag associated to pointer PTR. A memory
2250*404b540aSrobert tag is an artificial variable that represents the memory location
2251*404b540aSrobert pointed-to by PTR. It is used to model the effects of pointer
2252*404b540aSrobert de-references on addressable variables.
2253*404b540aSrobert
2254*404b540aSrobert AI points to the data gathered during alias analysis. This
2255*404b540aSrobert function populates the array AI->POINTERS. */
2256*404b540aSrobert
2257*404b540aSrobert static tree
2258*404b540aSrobert get_tmt_for (tree ptr, struct alias_info *ai)
2259*404b540aSrobert {
2260*404b540aSrobert size_t i;
2261*404b540aSrobert tree tag;
2262*404b540aSrobert tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2263*404b540aSrobert HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2264*404b540aSrobert
2265*404b540aSrobert /* We use a unique memory tag for all the ref-all pointers. */
2266*404b540aSrobert if (PTR_IS_REF_ALL (ptr))
2267*404b540aSrobert {
2268*404b540aSrobert if (!ai->ref_all_symbol_mem_tag)
2269*404b540aSrobert ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
2270*404b540aSrobert return ai->ref_all_symbol_mem_tag;
2271*404b540aSrobert }
2272*404b540aSrobert
2273*404b540aSrobert /* To avoid creating unnecessary memory tags, only create one memory tag
2274*404b540aSrobert per alias set class. Note that it may be tempting to group
2275*404b540aSrobert memory tags based on conflicting alias sets instead of
2276*404b540aSrobert equivalence. That would be wrong because alias sets are not
2277*404b540aSrobert necessarily transitive (as demonstrated by the libstdc++ test
2278*404b540aSrobert 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2279*404b540aSrobert such that conflicts (A, B) == true and conflicts (A, C) == true,
2280*404b540aSrobert it does not necessarily follow that conflicts (B, C) == true. */
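/* For instance (a hand-written illustration):

     struct S { int i; };
     struct T { int i; double d; };

   The alias set of 'int' conflicts with the sets of both S and T,
   since each contains an int member, yet the sets of S and T need not
   conflict with each other; hence tags are shared only on alias set
   equality, never on mere conflict.  */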
2281*404b540aSrobert for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2282*404b540aSrobert {
2283*404b540aSrobert struct alias_map_d *curr = ai->pointers[i];
2284*404b540aSrobert tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
2285*404b540aSrobert if (tag_set == curr->set)
2286*404b540aSrobert {
2287*404b540aSrobert tag = curr_tag;
2288*404b540aSrobert break;
2289*404b540aSrobert }
2290*404b540aSrobert }
2291*404b540aSrobert
2292*404b540aSrobert /* If VAR cannot alias with any of the existing memory tags, create a new
2293*404b540aSrobert tag for PTR and add it to the POINTERS array. */
2294*404b540aSrobert if (tag == NULL_TREE)
2295*404b540aSrobert {
2296*404b540aSrobert struct alias_map_d *alias_map;
2297*404b540aSrobert
2298*404b540aSrobert /* If PTR did not have a symbol tag already, create a new SMT, an
2299*404b540aSrobert artificial variable representing the memory location
2300*404b540aSrobert pointed-to by PTR. */
2301*404b540aSrobert if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
2302*404b540aSrobert tag = create_memory_tag (tag_type, true);
2303*404b540aSrobert else
2304*404b540aSrobert tag = var_ann (ptr)->symbol_mem_tag;
2305*404b540aSrobert
2306*404b540aSrobert /* Add PTR to the POINTERS array. Note that we are not interested in
2307*404b540aSrobert PTR's alias set. Instead, we cache the alias set for the memory that
2308*404b540aSrobert PTR points to. */
2309*404b540aSrobert alias_map = XCNEW (struct alias_map_d);
2310*404b540aSrobert alias_map->var = ptr;
2311*404b540aSrobert alias_map->set = tag_set;
2312*404b540aSrobert ai->pointers[ai->num_pointers++] = alias_map;
2313*404b540aSrobert }
2314*404b540aSrobert
2315*404b540aSrobert /* If the pointed-to type is volatile, so is the tag. */
2316*404b540aSrobert TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
2317*404b540aSrobert
2318*404b540aSrobert /* Make sure that the symbol tag has the same alias set as the
2319*404b540aSrobert pointed-to type. */
2320*404b540aSrobert gcc_assert (tag_set == get_alias_set (tag));
2321*404b540aSrobert
2322*404b540aSrobert return tag;
2323*404b540aSrobert }
2324*404b540aSrobert
2325*404b540aSrobert
2326*404b540aSrobert /* Create GLOBAL_VAR, an artificial global variable to act as a
2327*404b540aSrobert representative of all the variables that may be clobbered by function
2328*404b540aSrobert calls. */
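/* Editorial note (illustration only, hypothetical code): with .GLOBAL_VAR
   in place, a call such as

     extern void foo (void);
     int g;
     ...
     foo ();

   can be summarized by a single virtual definition of .GLOBAL_VAR instead
   of one virtual operand per call-clobbered variable such as 'g'.  */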
2329*404b540aSrobert
2330*404b540aSrobert static void
2331*404b540aSrobert create_global_var (void)
2332*404b540aSrobert {
2333*404b540aSrobert global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2334*404b540aSrobert void_type_node);
2335*404b540aSrobert DECL_ARTIFICIAL (global_var) = 1;
2336*404b540aSrobert TREE_READONLY (global_var) = 0;
2337*404b540aSrobert DECL_EXTERNAL (global_var) = 1;
2338*404b540aSrobert TREE_STATIC (global_var) = 1;
2339*404b540aSrobert TREE_USED (global_var) = 1;
2340*404b540aSrobert DECL_CONTEXT (global_var) = NULL_TREE;
2341*404b540aSrobert TREE_THIS_VOLATILE (global_var) = 0;
2342*404b540aSrobert TREE_ADDRESSABLE (global_var) = 0;
2343*404b540aSrobert
2344*404b540aSrobert create_var_ann (global_var);
2345*404b540aSrobert mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2346*404b540aSrobert add_referenced_var (global_var);
2347*404b540aSrobert mark_sym_for_renaming (global_var);
2348*404b540aSrobert }
2349*404b540aSrobert
2350*404b540aSrobert
2351*404b540aSrobert /* Dump alias statistics on FILE. */
2352*404b540aSrobert
2353*404b540aSrobert static void
2354*404b540aSrobert dump_alias_stats (FILE *file)
2355*404b540aSrobert {
2356*404b540aSrobert const char *funcname
2357*404b540aSrobert = lang_hooks.decl_printable_name (current_function_decl, 2);
2358*404b540aSrobert fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2359*404b540aSrobert fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2360*404b540aSrobert fprintf (file, "Total alias mayalias results:\t%u\n",
2361*404b540aSrobert alias_stats.alias_mayalias);
2362*404b540aSrobert fprintf (file, "Total alias noalias results:\t%u\n",
2363*404b540aSrobert alias_stats.alias_noalias);
2364*404b540aSrobert fprintf (file, "Total simple queries:\t%u\n",
2365*404b540aSrobert alias_stats.simple_queries);
2366*404b540aSrobert fprintf (file, "Total simple resolved:\t%u\n",
2367*404b540aSrobert alias_stats.simple_resolved);
2368*404b540aSrobert fprintf (file, "Total TBAA queries:\t%u\n",
2369*404b540aSrobert alias_stats.tbaa_queries);
2370*404b540aSrobert fprintf (file, "Total TBAA resolved:\t%u\n",
2371*404b540aSrobert alias_stats.tbaa_resolved);
2372*404b540aSrobert fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2373*404b540aSrobert alias_stats.structnoaddress_queries);
2374*404b540aSrobert fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2375*404b540aSrobert alias_stats.structnoaddress_resolved);
2376*404b540aSrobert }
2377*404b540aSrobert
2378*404b540aSrobert
2379*404b540aSrobert /* Dump alias information on FILE. */
2380*404b540aSrobert
2381*404b540aSrobert void
2382*404b540aSrobert dump_alias_info (FILE *file)
2383*404b540aSrobert {
2384*404b540aSrobert size_t i;
2385*404b540aSrobert const char *funcname
2386*404b540aSrobert = lang_hooks.decl_printable_name (current_function_decl, 2);
2387*404b540aSrobert referenced_var_iterator rvi;
2388*404b540aSrobert tree var;
2389*404b540aSrobert
2390*404b540aSrobert fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2391*404b540aSrobert
2392*404b540aSrobert fprintf (file, "Aliased symbols\n\n");
2393*404b540aSrobert
2394*404b540aSrobert FOR_EACH_REFERENCED_VAR (var, rvi)
2395*404b540aSrobert {
2396*404b540aSrobert if (may_be_aliased (var))
2397*404b540aSrobert dump_variable (file, var);
2398*404b540aSrobert }
2399*404b540aSrobert
2400*404b540aSrobert fprintf (file, "\nDereferenced pointers\n\n");
2401*404b540aSrobert
2402*404b540aSrobert FOR_EACH_REFERENCED_VAR (var, rvi)
2403*404b540aSrobert {
2404*404b540aSrobert var_ann_t ann = var_ann (var);
2405*404b540aSrobert if (ann->symbol_mem_tag)
2406*404b540aSrobert dump_variable (file, var);
2407*404b540aSrobert }
2408*404b540aSrobert
2409*404b540aSrobert fprintf (file, "\nSymbol memory tags\n\n");
2410*404b540aSrobert
2411*404b540aSrobert FOR_EACH_REFERENCED_VAR (var, rvi)
2412*404b540aSrobert {
2413*404b540aSrobert if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
2414*404b540aSrobert dump_variable (file, var);
2415*404b540aSrobert }
2416*404b540aSrobert
2417*404b540aSrobert fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2418*404b540aSrobert
2419*404b540aSrobert fprintf (file, "SSA_NAME pointers\n\n");
2420*404b540aSrobert for (i = 1; i < num_ssa_names; i++)
2421*404b540aSrobert {
2422*404b540aSrobert tree ptr = ssa_name (i);
2423*404b540aSrobert struct ptr_info_def *pi;
2424*404b540aSrobert
2425*404b540aSrobert if (ptr == NULL_TREE)
2426*404b540aSrobert continue;
2427*404b540aSrobert
2428*404b540aSrobert pi = SSA_NAME_PTR_INFO (ptr);
2429*404b540aSrobert if (!SSA_NAME_IN_FREE_LIST (ptr)
2430*404b540aSrobert && pi
2431*404b540aSrobert && pi->name_mem_tag)
2432*404b540aSrobert dump_points_to_info_for (file, ptr);
2433*404b540aSrobert }
2434*404b540aSrobert
2435*404b540aSrobert fprintf (file, "\nName memory tags\n\n");
2436*404b540aSrobert
2437*404b540aSrobert FOR_EACH_REFERENCED_VAR (var, rvi)
2438*404b540aSrobert {
2439*404b540aSrobert if (TREE_CODE (var) == NAME_MEMORY_TAG)
2440*404b540aSrobert dump_variable (file, var);
2441*404b540aSrobert }
2442*404b540aSrobert
2443*404b540aSrobert fprintf (file, "\n");
2444*404b540aSrobert }
2445*404b540aSrobert
2446*404b540aSrobert
2447*404b540aSrobert /* Dump alias information on stderr. */
2448*404b540aSrobert
2449*404b540aSrobert void
2450*404b540aSrobert debug_alias_info (void)
2451*404b540aSrobert {
2452*404b540aSrobert dump_alias_info (stderr);
2453*404b540aSrobert }
2454*404b540aSrobert
2455*404b540aSrobert
2456*404b540aSrobert /* Return the alias information associated with pointer T. It creates a
2457*404b540aSrobert new instance if none existed. */
2458*404b540aSrobert
2459*404b540aSrobert struct ptr_info_def *
2460*404b540aSrobert get_ptr_info (tree t)
2461*404b540aSrobert {
2462*404b540aSrobert struct ptr_info_def *pi;
2463*404b540aSrobert
2464*404b540aSrobert gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2465*404b540aSrobert
2466*404b540aSrobert pi = SSA_NAME_PTR_INFO (t);
2467*404b540aSrobert if (pi == NULL)
2468*404b540aSrobert {
2469*404b540aSrobert pi = GGC_NEW (struct ptr_info_def);
2470*404b540aSrobert memset ((void *)pi, 0, sizeof (*pi));
2471*404b540aSrobert SSA_NAME_PTR_INFO (t) = pi;
2472*404b540aSrobert }
2473*404b540aSrobert
2474*404b540aSrobert return pi;
2475*404b540aSrobert }
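/* Typical use (editorial sketch, not from the original sources): callers
   that are about to record points-to facts can simply do

     struct ptr_info_def *pi = get_ptr_info (ptr);
     pi->pt_anything = 1;

   relying on this function to allocate and zero the structure the first
   time PTR is seen.  */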
2476*404b540aSrobert
2477*404b540aSrobert
2478*404b540aSrobert /* Dump points-to information for SSA_NAME PTR into FILE. */
2479*404b540aSrobert
2480*404b540aSrobert void
2481*404b540aSrobert dump_points_to_info_for (FILE *file, tree ptr)
2482*404b540aSrobert {
2483*404b540aSrobert struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2484*404b540aSrobert
2485*404b540aSrobert print_generic_expr (file, ptr, dump_flags);
2486*404b540aSrobert
2487*404b540aSrobert if (pi)
2488*404b540aSrobert {
2489*404b540aSrobert if (pi->name_mem_tag)
2490*404b540aSrobert {
2491*404b540aSrobert fprintf (file, ", name memory tag: ");
2492*404b540aSrobert print_generic_expr (file, pi->name_mem_tag, dump_flags);
2493*404b540aSrobert }
2494*404b540aSrobert
2495*404b540aSrobert if (pi->is_dereferenced)
2496*404b540aSrobert fprintf (file, ", is dereferenced");
2497*404b540aSrobert
2498*404b540aSrobert if (pi->value_escapes_p)
2499*404b540aSrobert fprintf (file, ", its value escapes");
2500*404b540aSrobert
2501*404b540aSrobert if (pi->pt_anything)
2502*404b540aSrobert fprintf (file, ", points-to anything");
2503*404b540aSrobert
2504*404b540aSrobert if (pi->pt_null)
2505*404b540aSrobert fprintf (file, ", points-to NULL");
2506*404b540aSrobert
2507*404b540aSrobert if (pi->pt_vars)
2508*404b540aSrobert {
2509*404b540aSrobert unsigned ix;
2510*404b540aSrobert bitmap_iterator bi;
2511*404b540aSrobert
2512*404b540aSrobert fprintf (file, ", points-to vars: { ");
2513*404b540aSrobert EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2514*404b540aSrobert {
2515*404b540aSrobert print_generic_expr (file, referenced_var (ix), dump_flags);
2516*404b540aSrobert fprintf (file, " ");
2517*404b540aSrobert }
2518*404b540aSrobert fprintf (file, "}");
2519*404b540aSrobert }
2520*404b540aSrobert }
2521*404b540aSrobert
2522*404b540aSrobert fprintf (file, "\n");
2523*404b540aSrobert }
2524*404b540aSrobert
2525*404b540aSrobert
2526*404b540aSrobert /* Dump points-to information for VAR into stderr. */
2527*404b540aSrobert
2528*404b540aSrobert void
2529*404b540aSrobert debug_points_to_info_for (tree var)
2530*404b540aSrobert {
2531*404b540aSrobert dump_points_to_info_for (stderr, var);
2532*404b540aSrobert }
2533*404b540aSrobert
2534*404b540aSrobert
2535*404b540aSrobert /* Dump points-to information into FILE. NOTE: This function is slow, as
2536*404b540aSrobert it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2537*404b540aSrobert
2538*404b540aSrobert void
2539*404b540aSrobert dump_points_to_info (FILE *file)
2540*404b540aSrobert {
2541*404b540aSrobert basic_block bb;
2542*404b540aSrobert block_stmt_iterator si;
2543*404b540aSrobert ssa_op_iter iter;
2544*404b540aSrobert const char *fname =
2545*404b540aSrobert lang_hooks.decl_printable_name (current_function_decl, 2);
2546*404b540aSrobert referenced_var_iterator rvi;
2547*404b540aSrobert tree var;
2548*404b540aSrobert
2549*404b540aSrobert fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2550*404b540aSrobert
2551*404b540aSrobert /* First dump points-to information for the default definitions of
2552*404b540aSrobert pointer variables. This is necessary because default definitions are
2553*404b540aSrobert not part of the code. */
2554*404b540aSrobert FOR_EACH_REFERENCED_VAR (var, rvi)
2555*404b540aSrobert {
2556*404b540aSrobert if (POINTER_TYPE_P (TREE_TYPE (var)))
2557*404b540aSrobert {
2558*404b540aSrobert tree def = default_def (var);
2559*404b540aSrobert if (def)
2560*404b540aSrobert dump_points_to_info_for (file, def);
2561*404b540aSrobert }
2562*404b540aSrobert }
2563*404b540aSrobert
2564*404b540aSrobert /* Dump points-to information for every pointer defined in the program. */
2565*404b540aSrobert FOR_EACH_BB (bb)
2566*404b540aSrobert {
2567*404b540aSrobert tree phi;
2568*404b540aSrobert
2569*404b540aSrobert for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2570*404b540aSrobert {
2571*404b540aSrobert tree ptr = PHI_RESULT (phi);
2572*404b540aSrobert if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2573*404b540aSrobert dump_points_to_info_for (file, ptr);
2574*404b540aSrobert }
2575*404b540aSrobert
2576*404b540aSrobert for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2577*404b540aSrobert {
2578*404b540aSrobert tree stmt = bsi_stmt (si);
2579*404b540aSrobert tree def;
2580*404b540aSrobert FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2581*404b540aSrobert if (POINTER_TYPE_P (TREE_TYPE (def)))
2582*404b540aSrobert dump_points_to_info_for (file, def);
2583*404b540aSrobert }
2584*404b540aSrobert }
2585*404b540aSrobert
2586*404b540aSrobert fprintf (file, "\n");
2587*404b540aSrobert }
2588*404b540aSrobert
2589*404b540aSrobert
2590*404b540aSrobert /* Dump points-to information into stderr. */
2591*404b540aSrobert
2592*404b540aSrobert void
debug_points_to_info(void)2593*404b540aSrobert debug_points_to_info (void)
2594*404b540aSrobert {
2595*404b540aSrobert dump_points_to_info (stderr);
2596*404b540aSrobert }
2597*404b540aSrobert
2598*404b540aSrobert /* Dump to FILE the list of variables that may be aliasing VAR. */
2599*404b540aSrobert
2600*404b540aSrobert void
2601*404b540aSrobert dump_may_aliases_for (FILE *file, tree var)
2602*404b540aSrobert {
2603*404b540aSrobert VEC(tree, gc) *aliases;
2604*404b540aSrobert
2605*404b540aSrobert if (TREE_CODE (var) == SSA_NAME)
2606*404b540aSrobert var = SSA_NAME_VAR (var);
2607*404b540aSrobert
2608*404b540aSrobert aliases = var_ann (var)->may_aliases;
2609*404b540aSrobert if (aliases)
2610*404b540aSrobert {
2611*404b540aSrobert size_t i;
2612*404b540aSrobert tree al;
2613*404b540aSrobert fprintf (file, "{ ");
2614*404b540aSrobert for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2615*404b540aSrobert {
2616*404b540aSrobert print_generic_expr (file, al, dump_flags);
2617*404b540aSrobert fprintf (file, " ");
2618*404b540aSrobert }
2619*404b540aSrobert fprintf (file, "}");
2620*404b540aSrobert }
2621*404b540aSrobert }
2622*404b540aSrobert
2623*404b540aSrobert
2624*404b540aSrobert /* Dump to stderr the list of variables that may be aliasing VAR. */
2625*404b540aSrobert
2626*404b540aSrobert void
2627*404b540aSrobert debug_may_aliases_for (tree var)
2628*404b540aSrobert {
2629*404b540aSrobert dump_may_aliases_for (stderr, var);
2630*404b540aSrobert }
2631*404b540aSrobert
2632*404b540aSrobert /* Return true if VAR may be aliased. */
2633*404b540aSrobert
2634*404b540aSrobert bool
2635*404b540aSrobert may_be_aliased (tree var)
2636*404b540aSrobert {
2637*404b540aSrobert /* Obviously. */
2638*404b540aSrobert if (TREE_ADDRESSABLE (var))
2639*404b540aSrobert return true;
2640*404b540aSrobert
2641*404b540aSrobert /* Globally visible variables can have their addresses taken by other
2642*404b540aSrobert translation units. */
2643*404b540aSrobert
2644*404b540aSrobert if (MTAG_P (var)
2645*404b540aSrobert && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2646*404b540aSrobert return true;
2647*404b540aSrobert else if (!MTAG_P (var)
2648*404b540aSrobert && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2649*404b540aSrobert return true;
2650*404b540aSrobert
2651*404b540aSrobert /* Automatic variables can't have their addresses escape any other way.
2652*404b540aSrobert This must be after the check for global variables, as extern declarations
2653*404b540aSrobert do not have TREE_STATIC set. */
2654*404b540aSrobert if (!TREE_STATIC (var))
2655*404b540aSrobert return false;
2656*404b540aSrobert
2657*404b540aSrobert /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2658*404b540aSrobert of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2659*404b540aSrobert we can only be sure the variable isn't addressable if it's local to the
2660*404b540aSrobert current function. */
2661*404b540aSrobert if (flag_unit_at_a_time)
2662*404b540aSrobert return false;
2663*404b540aSrobert if (decl_function_context (var) == current_function_decl)
2664*404b540aSrobert return false;
2665*404b540aSrobert
2666*404b540aSrobert return true;
2667*404b540aSrobert }
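/* Editorial examples (illustration only, hypothetical declarations),
   assuming unit-at-a-time mode:

     int g;					may_be_aliased: true  (TREE_PUBLIC)
     static int s;				may_be_aliased: false (address never taken)
     void f (void) { int l; int *p = &l; }	may_be_aliased (l): true (TREE_ADDRESSABLE)  */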
2668*404b540aSrobert
2669*404b540aSrobert
2670*404b540aSrobert /* Given two symbols, return TRUE if one is in the may-alias set of the other. */
2671*404b540aSrobert bool
2672*404b540aSrobert is_aliased_with (tree tag, tree sym)
2673*404b540aSrobert {
2674*404b540aSrobert size_t i;
2675*404b540aSrobert VEC(tree,gc) *aliases;
2676*404b540aSrobert tree al;
2677*404b540aSrobert
2678*404b540aSrobert if (var_ann (sym)->is_aliased)
2679*404b540aSrobert {
2680*404b540aSrobert aliases = var_ann (tag)->may_aliases;
2681*404b540aSrobert
2682*404b540aSrobert if (aliases == NULL)
2683*404b540aSrobert return false;
2684*404b540aSrobert
2685*404b540aSrobert for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2686*404b540aSrobert if (al == sym)
2687*404b540aSrobert return true;
2688*404b540aSrobert }
2689*404b540aSrobert else
2690*404b540aSrobert {
2691*404b540aSrobert aliases = var_ann (sym)->may_aliases;
2692*404b540aSrobert
2693*404b540aSrobert if (aliases == NULL)
2694*404b540aSrobert return false;
2695*404b540aSrobert
2696*404b540aSrobert for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2697*404b540aSrobert if (al == tag)
2698*404b540aSrobert return true;
2699*404b540aSrobert }
2700*404b540aSrobert
2701*404b540aSrobert return false;
2702*404b540aSrobert }
2703*404b540aSrobert
2704*404b540aSrobert
2705*404b540aSrobert /* Given two tags return TRUE if their may-alias sets intersect. */
2706*404b540aSrobert
2707*404b540aSrobert bool
2708*404b540aSrobert may_aliases_intersect (tree tag1, tree tag2)
2709*404b540aSrobert {
2710*404b540aSrobert struct pointer_set_t *set1 = pointer_set_create ();
2711*404b540aSrobert unsigned i;
2712*404b540aSrobert VEC(tree,gc) *may_aliases1 = may_aliases (tag1);
2713*404b540aSrobert VEC(tree,gc) *may_aliases2 = may_aliases (tag2);
2714*404b540aSrobert tree sym;
2715*404b540aSrobert
2716*404b540aSrobert /* Insert all the symbols from the first may-alias set into the
2717*404b540aSrobert pointer-set. */
2718*404b540aSrobert for (i = 0; VEC_iterate (tree, may_aliases1, i, sym); i++)
2719*404b540aSrobert pointer_set_insert (set1, sym);
2720*404b540aSrobert
2721*404b540aSrobert /* Go through the second may-alias set and check if it contains symbols that
2722*404b540aSrobert are common with the first set. */
2723*404b540aSrobert for (i = 0; VEC_iterate (tree, may_aliases2, i, sym); i++)
2724*404b540aSrobert if (pointer_set_contains (set1, sym))
2725*404b540aSrobert {
2726*404b540aSrobert pointer_set_destroy (set1);
2727*404b540aSrobert return true;
2728*404b540aSrobert }
2729*404b540aSrobert
2730*404b540aSrobert pointer_set_destroy (set1);
2731*404b540aSrobert return false;
2732*404b540aSrobert }
2733*404b540aSrobert
2734*404b540aSrobert
2735*404b540aSrobert /* The following is based on code in add_stmt_operand to ensure that the
2736*404b540aSrobert same defs/uses/vdefs/vuses will be found after replacing a reference
2737*404b540aSrobert to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2738*404b540aSrobert is the address of var. Return a memtag for the ptr, after adding the
2739*404b540aSrobert proper may_aliases to it (which are the aliases of var, if it has any,
2740*404b540aSrobert or var itself). */
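/* Editorial summary of the cases handled below (not from the original
   sources; names are schematic):

     may_aliases (VAR) == { SMT.x } (a symbol tag)   return SMT.x itself   (case 1)
     may_aliases (VAR) == NULL                       add VAR to TAG        (case 2)
     may_aliases (VAR) == { a, b, ... }              add a, b, ... to TAG  (case 3)  */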
2741*404b540aSrobert
2742*404b540aSrobert static tree
2743*404b540aSrobert add_may_alias_for_new_tag (tree tag, tree var)
2744*404b540aSrobert {
2745*404b540aSrobert var_ann_t v_ann = var_ann (var);
2746*404b540aSrobert VEC(tree, gc) *aliases = v_ann->may_aliases;
2747*404b540aSrobert
2748*404b540aSrobert /* Case 1: |aliases| == 1 */
2749*404b540aSrobert if ((aliases != NULL)
2750*404b540aSrobert && (VEC_length (tree, aliases) == 1))
2751*404b540aSrobert {
2752*404b540aSrobert tree ali = VEC_index (tree, aliases, 0);
2753*404b540aSrobert
2754*404b540aSrobert if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
2755*404b540aSrobert return ali;
2756*404b540aSrobert }
2757*404b540aSrobert
2758*404b540aSrobert /* Case 2: |aliases| == 0 */
2759*404b540aSrobert if (aliases == NULL)
2760*404b540aSrobert add_may_alias (tag, var);
2761*404b540aSrobert else
2762*404b540aSrobert {
2763*404b540aSrobert /* Case 3: |aliases| > 1 */
2764*404b540aSrobert unsigned i;
2765*404b540aSrobert tree al;
2766*404b540aSrobert
2767*404b540aSrobert for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2768*404b540aSrobert add_may_alias (tag, al);
2769*404b540aSrobert }
2770*404b540aSrobert
2771*404b540aSrobert return tag;
2772*404b540aSrobert }
2773*404b540aSrobert
2774*404b540aSrobert /* Create a new symbol tag for PTR. Construct the may-alias list of this
2775*404b540aSrobert symbol tag so that it has the aliasing of VAR, or of the relevant subvars
2776*404b540aSrobert of VAR according to the location accessed by EXPR.
2777*404b540aSrobert
2778*404b540aSrobert Note, the set of aliases represented by the new symbol tag is not marked
2779*404b540aSrobert for renaming. */
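/* Editorial sketch of the intended use (illustration only; 'a' and 'p'
   are hypothetical): if a transformation rewrites

     struct { int x; int y; } a;
     ... = a.y;

   into '... = *p' where 'p' holds the address of 'a.y', this function
   gives 'p' a symbol tag whose may-aliases cover the subvariable(s) of
   'a' overlapping the accessed location, so the rewritten statement
   keeps equivalent virtual operands.  */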
2780*404b540aSrobert
2781*404b540aSrobert void
2782*404b540aSrobert new_type_alias (tree ptr, tree var, tree expr)
2783*404b540aSrobert {
2784*404b540aSrobert var_ann_t p_ann = var_ann (ptr);
2785*404b540aSrobert tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2786*404b540aSrobert tree tag;
2787*404b540aSrobert subvar_t svars;
2788*404b540aSrobert tree ali = NULL_TREE;
2789*404b540aSrobert HOST_WIDE_INT offset, size, maxsize;
2790*404b540aSrobert tree ref;
2791*404b540aSrobert
2792*404b540aSrobert gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
2793*404b540aSrobert gcc_assert (!MTAG_P (var));
2794*404b540aSrobert
2795*404b540aSrobert ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2796*404b540aSrobert gcc_assert (ref);
2797*404b540aSrobert
2798*404b540aSrobert tag = create_memory_tag (tag_type, true);
2799*404b540aSrobert p_ann->symbol_mem_tag = tag;
2800*404b540aSrobert
2801*404b540aSrobert /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
2802*404b540aSrobert subvars, add the subvars to the tag instead of the actual var. */
2803*404b540aSrobert if (var_can_have_subvars (var)
2804*404b540aSrobert && (svars = get_subvars_for_var (var)))
2805*404b540aSrobert {
2806*404b540aSrobert subvar_t sv;
2807*404b540aSrobert VEC (tree, heap) *overlaps = NULL;
2808*404b540aSrobert unsigned int len;
2809*404b540aSrobert
2810*404b540aSrobert for (sv = svars; sv; sv = sv->next)
2811*404b540aSrobert {
2812*404b540aSrobert bool exact;
2813*404b540aSrobert
2814*404b540aSrobert if (overlap_subvar (offset, maxsize, sv->var, &exact))
2815*404b540aSrobert VEC_safe_push (tree, heap, overlaps, sv->var);
2816*404b540aSrobert }
2817*404b540aSrobert len = VEC_length (tree, overlaps);
2818*404b540aSrobert if (dump_file && (dump_flags & TDF_DETAILS))
2819*404b540aSrobert fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
2820*404b540aSrobert gcc_assert (len);
2821*404b540aSrobert
2822*404b540aSrobert if (len == 1)
2823*404b540aSrobert ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
2824*404b540aSrobert else if (len > 1)
2825*404b540aSrobert {
2826*404b540aSrobert unsigned int k;
2827*404b540aSrobert tree sv_var;
2828*404b540aSrobert
2829*404b540aSrobert for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
2830*404b540aSrobert {
2831*404b540aSrobert ali = add_may_alias_for_new_tag (tag, sv_var);
2832*404b540aSrobert
2833*404b540aSrobert if (ali != tag)
2834*404b540aSrobert {
2835*404b540aSrobert /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
2836*404b540aSrobert took place. Since more than one svar was found, we add
2837*404b540aSrobert 'ali' as one of the may_aliases of the new tag. */
2838*404b540aSrobert add_may_alias (tag, ali);
2839*404b540aSrobert ali = tag;
2840*404b540aSrobert }
2841*404b540aSrobert }
2842*404b540aSrobert }
2843*404b540aSrobert }
2844*404b540aSrobert else
2845*404b540aSrobert ali = add_may_alias_for_new_tag (tag, var);
2846*404b540aSrobert
2847*404b540aSrobert p_ann->symbol_mem_tag = ali;
2848*404b540aSrobert TREE_READONLY (tag) = TREE_READONLY (var);
2849*404b540aSrobert MTAG_GLOBAL (tag) = is_global_var (var);
2850*404b540aSrobert }
2851*404b540aSrobert
2852*404b540aSrobert /* This represents the used range of a variable. */
2853*404b540aSrobert
2854*404b540aSrobert typedef struct used_part
2855*404b540aSrobert {
2856*404b540aSrobert HOST_WIDE_INT minused;
2857*404b540aSrobert HOST_WIDE_INT maxused;
2858*404b540aSrobert /* True if we have an explicit use/def of some portion of this variable,
2859*404b540aSrobert even if it is all of it, e.g. a.b = 5 or temp = a.b. */
2860*404b540aSrobert bool explicit_uses;
2861*404b540aSrobert /* True if we have an implicit use/def of some portion of this
2862*404b540aSrobert variable. Implicit uses occur when we can't tell what part we
2863*404b540aSrobert are referencing, and have to make conservative assumptions. */
2864*404b540aSrobert bool implicit_uses;
2865*404b540aSrobert /* True if the structure is only written to or has its address taken. */
2866*404b540aSrobert bool write_only;
2867*404b540aSrobert } *used_part_t;
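/* Editorial example (illustration only, hypothetical code): for

     struct { int a; int b; int c; } s;
     s.b = 1;

   with no other references to 's' in the function, the used_part for 's'
   would record a minused/maxused range covering only member 'b', with
   explicit_uses set and implicit_uses clear.  */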
2868*404b540aSrobert
2869*404b540aSrobert /* A hash table of used_part structures, indexed by variable uid. */
2870*404b540aSrobert
2871*404b540aSrobert static htab_t used_portions;
2872*404b540aSrobert
2873*404b540aSrobert struct used_part_map
2874*404b540aSrobert {
2875*404b540aSrobert unsigned int uid;
2876*404b540aSrobert used_part_t to;
2877*404b540aSrobert };
2878*404b540aSrobert
2879*404b540aSrobert /* Return true if the uids in the two used part maps are equal. */
2880*404b540aSrobert
2881*404b540aSrobert static int
2882*404b540aSrobert used_part_map_eq (const void *va, const void *vb)
2883*404b540aSrobert {
2884*404b540aSrobert const struct used_part_map *a = (const struct used_part_map *) va;
2885*404b540aSrobert const struct used_part_map *b = (const struct used_part_map *) vb;
2886*404b540aSrobert return (a->uid == b->uid);
2887*404b540aSrobert }
2888*404b540aSrobert
2889*404b540aSrobert /* Hash a used_part_map from its uid. */
2890*404b540aSrobert
2891*404b540aSrobert static unsigned int
2892*404b540aSrobert used_part_map_hash (const void *item)
2893*404b540aSrobert {
2894*404b540aSrobert return ((const struct used_part_map *)item)->uid;
2895*404b540aSrobert }
2896*404b540aSrobert
2897*404b540aSrobert /* Free a used part map element. */
2898*404b540aSrobert
2899*404b540aSrobert static void
2900*404b540aSrobert free_used_part_map (void *item)
2901*404b540aSrobert {
2902*404b540aSrobert free (((struct used_part_map *)item)->to);
2903*404b540aSrobert free (item);
2904*404b540aSrobert }
2905*404b540aSrobert
2906*404b540aSrobert /* Look up the used_part structure for UID. */
2907*404b540aSrobert
2908*404b540aSrobert static used_part_t
2909*404b540aSrobert up_lookup (unsigned int uid)
2910*404b540aSrobert {
2911*404b540aSrobert struct used_part_map *h, in;
2912*404b540aSrobert in.uid = uid;
2913*404b540aSrobert h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2914*404b540aSrobert if (!h)
2915*404b540aSrobert return NULL;
2916*404b540aSrobert return h->to;
2917*404b540aSrobert }
2918*404b540aSrobert
2919*404b540aSrobert /* Insert the pair UID, TO into the used part hashtable. */
2920*404b540aSrobert
2921*404b540aSrobert static void
2922*404b540aSrobert up_insert (unsigned int uid, used_part_t to)
2923*404b540aSrobert {
2924*404b540aSrobert struct used_part_map *h;
2925*404b540aSrobert void **loc;
2926*404b540aSrobert
2927*404b540aSrobert h = XNEW (struct used_part_map);
2928*404b540aSrobert h->uid = uid;
2929*404b540aSrobert h->to = to;
2930*404b540aSrobert loc = htab_find_slot_with_hash (used_portions, h,
2931*404b540aSrobert uid, INSERT);
2932*404b540aSrobert if (*loc != NULL)
2933*404b540aSrobert free (*loc);
2934*404b540aSrobert *(struct used_part_map **) loc = h;
2935*404b540aSrobert }
2936*404b540aSrobert
2937*404b540aSrobert
2938*404b540aSrobert /* Given a variable uid, UID, get or create the entry in the used portions
2939*404b540aSrobert table for the variable. */
2940*404b540aSrobert
2941*404b540aSrobert static used_part_t
2942*404b540aSrobert get_or_create_used_part_for (size_t uid)
2943*404b540aSrobert {
2944*404b540aSrobert used_part_t up;
2945*404b540aSrobert if ((up = up_lookup (uid)) == NULL)
2946*404b540aSrobert {
2947*404b540aSrobert up = XCNEW (struct used_part);
2948*404b540aSrobert up->minused = INT_MAX;
2949*404b540aSrobert up->maxused = 0;
2950*404b540aSrobert up->explicit_uses = false;
2951*404b540aSrobert up->implicit_uses = false;
2952*404b540aSrobert up->write_only = true;
2953*404b540aSrobert }
2954*404b540aSrobert
2955*404b540aSrobert return up;
2956*404b540aSrobert }
2957*404b540aSrobert
2958*404b540aSrobert
2959*404b540aSrobert /* Create and return a structure sub-variable for field type FIELD at
2960*404b540aSrobert offset OFFSET, with size SIZE, of variable VAR. */
2961*404b540aSrobert
2962*404b540aSrobert static tree
2963*404b540aSrobert create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
2964*404b540aSrobert unsigned HOST_WIDE_INT size)
2965*404b540aSrobert {
2966*404b540aSrobert var_ann_t ann;
2967*404b540aSrobert tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2968*404b540aSrobert
2969*404b540aSrobert /* We need to copy the various flags from VAR to SUBVAR, so that
2970*404b540aSrobert the new subvariable is is_global_var iff the original variable was. */
2971*404b540aSrobert DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2972*404b540aSrobert MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2973*404b540aSrobert TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2974*404b540aSrobert TREE_STATIC (subvar) = TREE_STATIC (var);
2975*404b540aSrobert TREE_READONLY (subvar) = TREE_READONLY (var);
2976*404b540aSrobert TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
2977*404b540aSrobert
2978*404b540aSrobert /* Add the new variable to REFERENCED_VARS. */
2979*404b540aSrobert ann = get_var_ann (subvar);
2980*404b540aSrobert ann->symbol_mem_tag = NULL;
2981*404b540aSrobert add_referenced_var (subvar);
2982*404b540aSrobert SFT_PARENT_VAR (subvar) = var;
2983*404b540aSrobert SFT_OFFSET (subvar) = offset;
2984*404b540aSrobert SFT_SIZE (subvar) = size;
2985*404b540aSrobert return subvar;
2986*404b540aSrobert }
2987*404b540aSrobert
2988*404b540aSrobert
2989*404b540aSrobert /* Given an aggregate VAR, create the subvariables that represent its
2990*404b540aSrobert fields. */
2991*404b540aSrobert
2992*404b540aSrobert static void
2993*404b540aSrobert create_overlap_variables_for (tree var)
2994*404b540aSrobert {
2995*404b540aSrobert VEC(fieldoff_s,heap) *fieldstack = NULL;
2996*404b540aSrobert used_part_t up;
2997*404b540aSrobert size_t uid = DECL_UID (var);
2998*404b540aSrobert
2999*404b540aSrobert up = up_lookup (uid);
3000*404b540aSrobert if (!up
3001*404b540aSrobert || up->write_only)
3002*404b540aSrobert return;
3003*404b540aSrobert
3004*404b540aSrobert push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
3005*404b540aSrobert if (VEC_length (fieldoff_s, fieldstack) != 0)
3006*404b540aSrobert {
3007*404b540aSrobert subvar_t *subvars;
3008*404b540aSrobert fieldoff_s *fo;
3009*404b540aSrobert bool notokay = false;
3010*404b540aSrobert int fieldcount = 0;
3011*404b540aSrobert int i;
3012*404b540aSrobert HOST_WIDE_INT lastfooffset = -1;
3013*404b540aSrobert HOST_WIDE_INT lastfosize = -1;
3014*404b540aSrobert tree lastfotype = NULL_TREE;
3015*404b540aSrobert
3016*404b540aSrobert /* Not all fields have DECL_SIZE set; for those that don't, we do not
3017*404b540aSrobert know their size and thus cannot handle them.
3018*404b540aSrobert The same is true of fields whose DECL_SIZE is not an integer
3019*404b540aSrobert constant (such as variable-sized fields).
3020*404b540aSrobert Fields with offsets which are not constant will have an offset < 0.
3021*404b540aSrobert We *could* handle fields that are constant sized arrays, but
3022*404b540aSrobert currently don't. Doing so would require some extra changes to
3023*404b540aSrobert tree-ssa-operands.c. */
3024*404b540aSrobert
3025*404b540aSrobert for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3026*404b540aSrobert {
3027*404b540aSrobert if (!fo->size
3028*404b540aSrobert || TREE_CODE (fo->size) != INTEGER_CST
3029*404b540aSrobert || fo->offset < 0)
3030*404b540aSrobert {
3031*404b540aSrobert notokay = true;
3032*404b540aSrobert break;
3033*404b540aSrobert }
3034*404b540aSrobert fieldcount++;
3035*404b540aSrobert }
3036*404b540aSrobert
3037*404b540aSrobert /* The current heuristic we use is as follows:
3038*404b540aSrobert If the variable has no used portions in this function, no
3039*404b540aSrobert structure vars are created for it.
3040*404b540aSrobert Otherwise,
3041*404b540aSrobert If the variable has fewer than SALIAS_MAX_IMPLICIT_FIELDS fields,
3042*404b540aSrobert we always create structure vars for it.
3043*404b540aSrobert If the variable has at least SALIAS_MAX_IMPLICIT_FIELDS fields and
3044*404b540aSrobert some explicit uses, we create structure vars for it.
3045*404b540aSrobert If the variable has at least SALIAS_MAX_IMPLICIT_FIELDS fields and
3046*404b540aSrobert no explicit uses, we do not create structure vars for it.
3047*404b540aSrobert */
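      /* Editorial worked example (illustration only, hypothetical code),
	 assuming fewer fields than SALIAS_MAX_IMPLICIT_FIELDS: for

	   struct { int a; int b; } s;
	   s.a = 1;

	 both fields are pushed on the field stack, but only the ones that
	 overlap the used range of 's' (here just 'a') survive the
	 filtering loop below and get a structure field tag.  */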
3048*404b540aSrobert
3049*404b540aSrobert if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
3050*404b540aSrobert && !up->explicit_uses)
3051*404b540aSrobert {
3052*404b540aSrobert if (dump_file && (dump_flags & TDF_DETAILS))
3053*404b540aSrobert {
3054*404b540aSrobert fprintf (dump_file, "Variable ");
3055*404b540aSrobert print_generic_expr (dump_file, var, 0);
3056*404b540aSrobert fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
3057*404b540aSrobert }
3058*404b540aSrobert notokay = true;
3059*404b540aSrobert }
3060*404b540aSrobert
3061*404b540aSrobert /* Bail out, if we can't create overlap variables. */
3062*404b540aSrobert if (notokay)
3063*404b540aSrobert {
3064*404b540aSrobert VEC_free (fieldoff_s, heap, fieldstack);
3065*404b540aSrobert return;
3066*404b540aSrobert }
3067*404b540aSrobert
3068*404b540aSrobert /* Otherwise, create the variables. */
3069*404b540aSrobert subvars = lookup_subvars_for_var (var);
3070*404b540aSrobert
3071*404b540aSrobert sort_fieldstack (fieldstack);
3072*404b540aSrobert
3073*404b540aSrobert for (i = VEC_length (fieldoff_s, fieldstack);
3074*404b540aSrobert VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
3075*404b540aSrobert {
3076*404b540aSrobert subvar_t sv;
3077*404b540aSrobert HOST_WIDE_INT fosize;
3078*404b540aSrobert tree currfotype;
3079*404b540aSrobert
3080*404b540aSrobert fosize = TREE_INT_CST_LOW (fo->size);
3081*404b540aSrobert currfotype = fo->type;
3082*404b540aSrobert
3083*404b540aSrobert /* If this field isn't in the used portion,
3084*404b540aSrobert or it has the exact same offset and size as the last
3085*404b540aSrobert field, skip it. */
3086*404b540aSrobert
3087*404b540aSrobert if (((fo->offset <= up->minused
3088*404b540aSrobert && fo->offset + fosize <= up->minused)
3089*404b540aSrobert || fo->offset >= up->maxused)
3090*404b540aSrobert || (fo->offset == lastfooffset
3091*404b540aSrobert && fosize == lastfosize
3092*404b540aSrobert && currfotype == lastfotype))
3093*404b540aSrobert continue;
3094*404b540aSrobert sv = GGC_NEW (struct subvar);
3095*404b540aSrobert sv->next = *subvars;
3096*404b540aSrobert sv->var = create_sft (var, fo->type, fo->offset, fosize);
3097*404b540aSrobert
3098*404b540aSrobert if (dump_file)
3099*404b540aSrobert {
3100*404b540aSrobert fprintf (dump_file, "structure field tag %s created for var %s",
3101*404b540aSrobert get_name (sv->var), get_name (var));
3102*404b540aSrobert fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
3103*404b540aSrobert SFT_OFFSET (sv->var));
3104*404b540aSrobert fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
3105*404b540aSrobert SFT_SIZE (sv->var));
3106*404b540aSrobert fprintf (dump_file, "\n");
3107*404b540aSrobert }
3108*404b540aSrobert
3109*404b540aSrobert lastfotype = currfotype;
3110*404b540aSrobert lastfooffset = fo->offset;
3111*404b540aSrobert lastfosize = fosize;
3112*404b540aSrobert *subvars = sv;
3113*404b540aSrobert }
3114*404b540aSrobert
3115*404b540aSrobert /* Once we have created subvars, the original is no longer call
3116*404b540aSrobert clobbered on its own. Its call clobbered status depends
3117*404b540aSrobert completely on the call clobbered status of the subvars.
3118*404b540aSrobert
3119*404b540aSrobert add_referenced_var in the above loop will take care of
3120*404b540aSrobert marking subvars of global variables as call clobbered for us
3121*404b540aSrobert to start, since they are global as well. */
3122*404b540aSrobert clear_call_clobbered (var);
3123*404b540aSrobert }
3124*404b540aSrobert
3125*404b540aSrobert VEC_free (fieldoff_s, heap, fieldstack);
3126*404b540aSrobert }
3127*404b540aSrobert
3128*404b540aSrobert
3129*404b540aSrobert /* Find the conservative answer to the question of what portions of what
3130*404b540aSrobert structures are used by this statement. We assume that if we have a
3131*404b540aSrobert component ref with a known size and offset, we only need that part
3132*404b540aSrobert of the structure. For unknown cases, or cases where we do something
3133*404b540aSrobert to the whole structure, we assume we need to create fields for the
3134*404b540aSrobert entire structure. */
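/* Editorial examples (illustration only, hypothetical statements) of how
   the cases below classify uses of a struct 's':

     s.a = 1;	COMPONENT_REF: only the bit range of 'a', an explicit use
     p = &s;	ADDR_EXPR: all of 's', an implicit use
     s = t;	VAR_DECL: all of 's', an implicit use  */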
3135*404b540aSrobert
3136*404b540aSrobert static tree
3137*404b540aSrobert find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3138*404b540aSrobert {
3139*404b540aSrobert switch (TREE_CODE (*tp))
3140*404b540aSrobert {
3141*404b540aSrobert case MODIFY_EXPR:
3142*404b540aSrobert /* Recurse manually here to track whether the use is in the
3143*404b540aSrobert LHS of an assignment. */
3144*404b540aSrobert find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
3145*404b540aSrobert return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
3146*404b540aSrobert case REALPART_EXPR:
3147*404b540aSrobert case IMAGPART_EXPR:
3148*404b540aSrobert case COMPONENT_REF:
3149*404b540aSrobert case ARRAY_REF:
3150*404b540aSrobert {
3151*404b540aSrobert HOST_WIDE_INT bitsize;
3152*404b540aSrobert HOST_WIDE_INT bitmaxsize;
3153*404b540aSrobert HOST_WIDE_INT bitpos;
3154*404b540aSrobert tree ref;
3155*404b540aSrobert ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3156*404b540aSrobert if (DECL_P (ref)
3157*404b540aSrobert && var_can_have_subvars (ref)
3158*404b540aSrobert && bitmaxsize != -1)
3159*404b540aSrobert {
3160*404b540aSrobert size_t uid = DECL_UID (ref);
3161*404b540aSrobert used_part_t up;
3162*404b540aSrobert
3163*404b540aSrobert up = get_or_create_used_part_for (uid);
3164*404b540aSrobert
3165*404b540aSrobert if (bitpos <= up->minused)
3166*404b540aSrobert up->minused = bitpos;
3167*404b540aSrobert if ((bitpos + bitmaxsize >= up->maxused))
3168*404b540aSrobert up->maxused = bitpos + bitmaxsize;
3169*404b540aSrobert
3170*404b540aSrobert if (bitsize == bitmaxsize)
3171*404b540aSrobert up->explicit_uses = true;
3172*404b540aSrobert else
3173*404b540aSrobert up->implicit_uses = true;
3174*404b540aSrobert if (!lhs_p)
3175*404b540aSrobert up->write_only = false;
3176*404b540aSrobert up_insert (uid, up);
3177*404b540aSrobert
3178*404b540aSrobert *walk_subtrees = 0;
3179*404b540aSrobert return NULL_TREE;
3180*404b540aSrobert }
3181*404b540aSrobert }
3182*404b540aSrobert break;
3183*404b540aSrobert /* This is here to make sure we mark the entire base variable as used
3184*404b540aSrobert when you take its address. Because our used portion analysis is
3185*404b540aSrobert simple, we aren't looking at casts or pointer arithmetic to see what
3186*404b540aSrobert happens when you take the address. */
3187*404b540aSrobert case ADDR_EXPR:
3188*404b540aSrobert {
3189*404b540aSrobert tree var = get_base_address (TREE_OPERAND (*tp, 0));
3190*404b540aSrobert
3191*404b540aSrobert if (var
3192*404b540aSrobert && DECL_P (var)
3193*404b540aSrobert && DECL_SIZE (var)
3194*404b540aSrobert && var_can_have_subvars (var)
3195*404b540aSrobert && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3196*404b540aSrobert {
3197*404b540aSrobert used_part_t up;
3198*404b540aSrobert size_t uid = DECL_UID (var);
3199*404b540aSrobert
3200*404b540aSrobert up = get_or_create_used_part_for (uid);
3201*404b540aSrobert
3202*404b540aSrobert up->minused = 0;
3203*404b540aSrobert up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3204*404b540aSrobert up->implicit_uses = true;
3205*404b540aSrobert if (!lhs_p)
3206*404b540aSrobert up->write_only = false;
3207*404b540aSrobert
3208*404b540aSrobert up_insert (uid, up);
3209*404b540aSrobert *walk_subtrees = 0;
3210*404b540aSrobert return NULL_TREE;
3211*404b540aSrobert }
3212*404b540aSrobert }
3213*404b540aSrobert break;
3214*404b540aSrobert case CALL_EXPR:
3215*404b540aSrobert {
3216*404b540aSrobert tree *arg;
3217*404b540aSrobert for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg))
3218*404b540aSrobert {
3219*404b540aSrobert if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR)
3220*404b540aSrobert find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL);
3221*404b540aSrobert }
3222*404b540aSrobert *walk_subtrees = 0;
3223*404b540aSrobert return NULL_TREE;
3224*404b540aSrobert }
3225*404b540aSrobert case VAR_DECL:
3226*404b540aSrobert case PARM_DECL:
3227*404b540aSrobert case RESULT_DECL:
3228*404b540aSrobert {
3229*404b540aSrobert tree var = *tp;
3230*404b540aSrobert if (DECL_SIZE (var)
3231*404b540aSrobert && var_can_have_subvars (var)
3232*404b540aSrobert && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3233*404b540aSrobert {
3234*404b540aSrobert used_part_t up;
3235*404b540aSrobert size_t uid = DECL_UID (var);
3236*404b540aSrobert
3237*404b540aSrobert up = get_or_create_used_part_for (uid);
3238*404b540aSrobert
3239*404b540aSrobert up->minused = 0;
3240*404b540aSrobert up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3241*404b540aSrobert up->implicit_uses = true;
3242*404b540aSrobert
3243*404b540aSrobert up_insert (uid, up);
3244*404b540aSrobert *walk_subtrees = 0;
3245*404b540aSrobert return NULL_TREE;
3246*404b540aSrobert }
3247*404b540aSrobert }
3248*404b540aSrobert break;
3249*404b540aSrobert
3250*404b540aSrobert default:
3251*404b540aSrobert break;
3252*404b540aSrobert
3253*404b540aSrobert }
3254*404b540aSrobert return NULL_TREE;
3255*404b540aSrobert }
3256*404b540aSrobert
3257*404b540aSrobert /* Create structure field variables for structures used in this function. */
3258*404b540aSrobert
3259*404b540aSrobert static unsigned int
3260*404b540aSrobert create_structure_vars (void)
3261*404b540aSrobert {
3262*404b540aSrobert basic_block bb;
3263*404b540aSrobert safe_referenced_var_iterator rvi;
3264*404b540aSrobert VEC (tree, heap) *varvec = NULL;
3265*404b540aSrobert tree var;
3266*404b540aSrobert
3267*404b540aSrobert used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3268*404b540aSrobert free_used_part_map);
3269*404b540aSrobert
3270*404b540aSrobert FOR_EACH_BB (bb)
3271*404b540aSrobert {
3272*404b540aSrobert block_stmt_iterator bsi;
3273*404b540aSrobert for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3274*404b540aSrobert {
3275*404b540aSrobert walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3276*404b540aSrobert find_used_portions,
3277*404b540aSrobert NULL);
3278*404b540aSrobert }
3279*404b540aSrobert }
3280*404b540aSrobert FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3281*404b540aSrobert {
3282*404b540aSrobert /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3283*404b540aSrobert if (var
3284*404b540aSrobert && DECL_SIZE (var)
3285*404b540aSrobert && var_can_have_subvars (var)
3286*404b540aSrobert && !MTAG_P (var)
3287*404b540aSrobert && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3288*404b540aSrobert create_overlap_variables_for (var);
3289*404b540aSrobert }
3290*404b540aSrobert htab_delete (used_portions);
3291*404b540aSrobert VEC_free (tree, heap, varvec);
3292*404b540aSrobert return 0;
3293*404b540aSrobert }
3294*404b540aSrobert
3295*404b540aSrobert static bool
3296*404b540aSrobert gate_structure_vars (void)
3297*404b540aSrobert {
3298*404b540aSrobert return flag_tree_salias != 0;
3299*404b540aSrobert }
3300*404b540aSrobert
3301*404b540aSrobert struct tree_opt_pass pass_create_structure_vars =
3302*404b540aSrobert {
3303*404b540aSrobert "salias", /* name */
3304*404b540aSrobert gate_structure_vars, /* gate */
3305*404b540aSrobert create_structure_vars, /* execute */
3306*404b540aSrobert NULL, /* sub */
3307*404b540aSrobert NULL, /* next */
3308*404b540aSrobert 0, /* static_pass_number */
3309*404b540aSrobert 0, /* tv_id */
3310*404b540aSrobert PROP_cfg, /* properties_required */
3311*404b540aSrobert 0, /* properties_provided */
3312*404b540aSrobert 0, /* properties_destroyed */
3313*404b540aSrobert 0, /* todo_flags_start */
3314*404b540aSrobert TODO_dump_func, /* todo_flags_finish */
3315*404b540aSrobert 0 /* letter */
3316*404b540aSrobert };
3317*404b540aSrobert
3318*404b540aSrobert /* Reset the DECL_CALL_CLOBBERED flags on our referenced vars. In
3319*404b540aSrobert theory, this only needs to be done for globals. */
3320*404b540aSrobert
3321*404b540aSrobert static unsigned int
3322*404b540aSrobert reset_cc_flags (void)
3323*404b540aSrobert {
3324*404b540aSrobert tree var;
3325*404b540aSrobert referenced_var_iterator rvi;
3326*404b540aSrobert
3327*404b540aSrobert FOR_EACH_REFERENCED_VAR (var, rvi)
3328*404b540aSrobert DECL_CALL_CLOBBERED (var) = false;
3329*404b540aSrobert return 0;
3330*404b540aSrobert }
3331*404b540aSrobert
3332*404b540aSrobert struct tree_opt_pass pass_reset_cc_flags =
3333*404b540aSrobert {
3334*404b540aSrobert NULL, /* name */
3335*404b540aSrobert NULL, /* gate */
3336*404b540aSrobert reset_cc_flags, /* execute */
3337*404b540aSrobert NULL, /* sub */
3338*404b540aSrobert NULL, /* next */
3339*404b540aSrobert 0, /* static_pass_number */
3340*404b540aSrobert 0, /* tv_id */
3341*404b540aSrobert PROP_referenced_vars |PROP_cfg, /* properties_required */
3342*404b540aSrobert 0, /* properties_provided */
3343*404b540aSrobert 0, /* properties_destroyed */
3344*404b540aSrobert 0, /* todo_flags_start */
3345*404b540aSrobert 0, /* todo_flags_finish */
3346*404b540aSrobert 0 /* letter */
3347*404b540aSrobert };