/* Data flow functions for trees.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-dfa.h"

/* Build and maintain data flow information for trees.  */

/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  long num_defs;
  long num_uses;
  long num_phis;
  long num_phi_args;
  size_t max_num_phi_args;
  long num_vdefs;
  long num_vuses;
};


/* Local functions.  */
static void collect_dfa_stats (struct dfa_stats_d *);


/*---------------------------------------------------------------------------
			Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/

/* Renumber all of the gimple stmt uids.  */

void
renumber_gimple_stmt_uids (void)
{
  basic_block bb;

  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
}
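
/* Illustrative sketch (not part of GCC): because UIDs are handed out in
   increasing order while walking each block's PHIs and then its statements,
   a pass that has just renumbered can compare the order of two non-PHI
   statements in the same basic block in O(1).  The helper below is a
   hypothetical example, not an existing GCC function; it assumes A and B
   are in the same block and that no statements were inserted since the
   renumbering.

     static bool
     example_stmt_earlier_in_bb_p (gimple *a, gimple *b)
     {
       return gimple_uid (a) < gimple_uid (b);
     }
*/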

/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
   in BLOCKS, of which there are N_BLOCKS.  Also renumbers PHIs.  */

void
renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
{
  int i;

  set_gimple_stmt_max_uid (cfun, 0);
  for (i = 0; i < n_blocks; i++)
    {
      basic_block bb = blocks[i];
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}
    }
}



/*---------------------------------------------------------------------------
			      Debugging functions
---------------------------------------------------------------------------*/

/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}
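
/* Illustrative sketch (not part of GCC): for a file-scope declaration such
   as "int counter;" whose address is taken, the line written above looks
   roughly like

     counter, UID D.1234, int, is addressable, is global

   where D.1234 stands for whatever DECL_UID the front end assigned; the
   number shown here is only an assumption for illustration.  */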


/* Dump variable VAR and its may-aliases to stderr.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}


/* Dump various DFA statistics to FILE.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}


/* Dump DFA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}


/* Collect DFA statistics and store them in the structure pointed to by
   DFA_STATS_P.  */

static void
collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
  basic_block bb;

  gcc_assert (dfa_stats_p);

  memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));

  /* Walk all the statements in the function counting references.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  dfa_stats_p->num_phis++;
	  dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
	  if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
	    dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
	}

      for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
	  dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
	  dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
	  dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
	}
    }
}


/*---------------------------------------------------------------------------
			     Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Look up VAR's UID in the default_defs hashtable of FN and return the
   associated default definition (an SSA_NAME), or NULL_TREE if none.  */

tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}
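
/* Illustrative sketch (not part of GCC): for a PARM_DECL "n" of the current
   function, querying its incoming value looks like

     tree parm = ...;                          a PARM_DECL, assumed given
     tree ddef = ssa_default_def (cfun, parm);
     if (ddef)
       ...                                     ddef is the SSA_NAME n_1(D)

   The "(D)" suffix in GIMPLE dumps marks exactly these default
   definitions.  */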

/* Insert the pair VAR's UID, DEF into the default_defs hashtable
   of function FN.  */

void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}

/* Retrieve or create a default definition for VAR.  */

tree
get_or_create_ssa_default_def (struct function *fn, tree var)
{
  tree ddef = ssa_default_def (fn, var);
  if (ddef == NULL_TREE)
    {
      ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
      set_ssa_default_def (fn, var, ddef);
    }
  return ddef;
}
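
/* Illustrative sketch (not part of GCC): a pass that needs an SSA name for
   the incoming value of VAR, whether or not one exists yet, can simply do

     tree name = get_or_create_ssa_default_def (cfun, var);

   On the first call this creates a fresh SSA_NAME whose defining statement
   is the empty GIMPLE_NOP built by gimple_build_nop above; later calls
   return the same name via ssa_default_def.  */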


/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  If *PREVERSE is
   true, the storage order of the reference is reversed.  */

tree
get_ref_base_and_extent (tree exp, poly_int64_pod *poffset,
			 poly_int64_pod *psize,
			 poly_int64_pod *pmax_size,
			 bool *preverse)
{
  poly_offset_int bitsize = -1;
  poly_offset_int maxsize;
  tree size_tree = NULL_TREE;
  poly_offset_int bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    bitsize = wi::to_poly_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && poly_int_tree_p (this_offset))
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE_UNIT (field);
			tree ssize = TYPE_SIZE_UNIT (stype);
			if (fsize == NULL
			    || !poly_int_tree_p (fsize)
			    || ssize == NULL
			    || !poly_int_tree_p (ssize))
			  maxsize = -1;
			else if (known_size_p (maxsize))
			  {
			    poly_offset_int tem
			      = (wi::to_poly_offset (ssize)
				 - wi::to_poly_offset (fsize));
			    tem <<= LOG2_BITS_PER_UNIT;
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* A component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (known_size_p (maxsize)
		    && csize
		    && poly_int_tree_p (csize))
		  maxsize = wi::to_poly_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (TREE_TYPE (index)));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (known_size_p (maxsize)
		    && asize
		    && poly_int_tree_p (asize))
		  maxsize = wi::to_poly_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;
	      }
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	     struct { int length; int a[1]; } x;           x.a[d]
	     struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	     struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	     struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && known_size_p (maxsize)
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		  || (maybe_eq
		      (bit_offset + maxsize,
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  poly_offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  poly_int64 off_hwi;
		  if (off.to_shwi (&off_hwi))
		    {
		      bit_offset = off_hwi;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  /* ???  Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  if (!bit_offset.to_shwi (poffset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || !poly_int_tree_p (sz_tree)
		      || maybe_eq (bit_offset + maxsize,
				   wi::to_poly_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      else if (!known_size_p (maxsize)
	       && DECL_SIZE (exp)
	       && poly_int_tree_p (DECL_SIZE (exp)))
	maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base type constant.  */
      if (!known_size_p (maxsize)
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
	maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  if (!maxsize.to_shwi (pmax_size)
      || maybe_lt (*pmax_size, 0)
      || !endpoint_representable_p (*poffset, *pmax_size))
    *pmax_size = -1;

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT; the callers don't
     check for such overflows individually and assume it works.  */
  if (!endpoint_representable_p (*poffset, *psize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}
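
/* Illustrative sketch (not part of GCC): assuming a 32-bit int target and
   the local declarations

     struct S { int i; int j; } s;
     int a[8];

   the reference s.j yields the VAR_DECL for s with *POFFSET == 32,
   *PSIZE == 32 and *PMAX_SIZE == 32, while a[k] with a non-constant K
   yields the VAR_DECL for a with *POFFSET == 0, *PSIZE == 32 and
   *PMAX_SIZE == 256, i.e. any element of the array may be touched.  */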

/* Like get_ref_base_and_extent, but for cases in which we only care
   about constant-width accesses at constant offsets.  Return null
   if the access is anything else.  */

tree
get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
			     HOST_WIDE_INT *psize, bool *preverse)
{
  poly_int64 offset, size, max_size;
  HOST_WIDE_INT const_offset, const_size;
  bool reverse;
  tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
				       &reverse);
  if (!offset.is_constant (&const_offset)
      || !size.is_constant (&const_size)
      || const_offset < 0
      || !known_size_p (max_size)
      || maybe_ne (max_size, const_size))
    return NULL_TREE;

  *poffset = const_offset;
  *psize = const_size;
  *preverse = reverse;
  return decl;
}
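
/* Illustrative sketch (not part of GCC): with the declarations from the
   get_ref_base_and_extent sketch above, this wrapper returns the base decl
   for s.j (offset 32, size 32), but returns NULL_TREE for a[k] with a
   non-constant K, because there the size and the maximum size differ.  */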

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

tree
get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
				 tree (*valueize) (tree))
{
  poly_int64 byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    poly_int64 this_byte_offset;
	    poly_uint64 this_bit_offset;
	    if (!poly_int_tree_p (TREE_OPERAND (exp, 2), &this_bit_offset)
		|| !multiple_p (this_bit_offset, BITS_PER_UNIT,
				&this_byte_offset))
	      return NULL_TREE;
	    byte_offset += this_byte_offset;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    poly_int64 hthis_offset;

	    if (!this_offset
		|| !poly_int_tree_p (this_offset, &hthis_offset)
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (TREE_TYPE (index)));
		woffset *= wi::to_offset (unit_size);
		byte_offset += woffset.force_shwi ();
	      }
	    else
	      return NULL_TREE;
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
get_addr_base_and_unit_offset (tree exp, poly_int64_pod *poffset)
{
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}
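
/* Illustrative sketch (not part of GCC): reusing the declarations from the
   get_ref_base_and_extent sketch above, the access s.j yields the VAR_DECL
   for s with *POFFSET == 4 (a byte offset rather than a bit offset), while
   a[k] with a non-constant K makes this function return NULL_TREE.  */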

/* Returns true if STMT references an SSA_NAME that has
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */

bool
stmt_references_abnormal_ssa_name (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
	return true;
    }

  return false;
}

/* If STMT takes any abnormal PHI values as input, replace them with
   local copies.  */

void
replace_abnormal_ssa_names (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      tree op = USE_FROM_PTR (use_p);
      if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  tree new_name = make_ssa_name (TREE_TYPE (op));
	  gassign *assign = gimple_build_assign (new_name, op);
	  gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
	  SET_USE (use_p, new_name);
	}
    }
}
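
/* Illustrative sketch (not part of GCC): if x_3(ab) occurs in an abnormal
   PHI and STMT is "foo (x_3(ab));", the use is rewritten roughly as

     x_7 = x_3(ab);
     foo (x_7);

   so that STMT itself no longer references the abnormal name.  The SSA
   version numbers are arbitrary and used only for illustration.  */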

/* Pair of tree and a sorting index, for dump_enumerated_decls.  */
struct GTY(()) numbered_tree
{
  tree t;
  int num;
};


/* Compare two declaration references by their DECL_UID / sequence number.
   Called via qsort.  */

static int
compare_decls_by_uid (const void *pa, const void *pb)
{
  const numbered_tree *nt_a = ((const numbered_tree *)pa);
  const numbered_tree *nt_b = ((const numbered_tree *)pb);

  if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
    return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
  return nt_a->num - nt_b->num;
}

/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.  */
static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
  numbered_tree nt;

  if (!DECL_P (*tp))
    return NULL_TREE;
  nt.t = *tp;
  nt.num = list->length ();
  list->safe_push (nt);
  *walk_subtrees = 0;
  return NULL_TREE;
}

/* Find all the declarations used by the current function, sort them by uid,
   and emit the sorted list.  Each declaration is tagged with a sequence
   number indicating when it was found during statement / tree walking,
   so that TDF_NOUID comparisons of anonymous declarations are still
   meaningful.  Where a declaration was encountered more than once, we
   emit only the sequence number of the first encounter.
   FILE is the dump file to which the list is output and FLAGS is as in
   print_generic_expr.  */
void
dump_enumerated_decls (FILE *file, dump_flags_t flags)
{
  basic_block bb;
  struct walk_stmt_info wi;
  auto_vec<numbered_tree, 40> decl_list;

  memset (&wi, '\0', sizeof (wi));
  wi.info = (void *) &decl_list;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	if (!is_gimple_debug (gsi_stmt (gsi)))
	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }
  decl_list.qsort (compare_decls_by_uid);
  if (decl_list.length ())
    {
      unsigned ix;
      numbered_tree *ntp;
      tree last = NULL_TREE;

      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
	       current_function_name ());
      FOR_EACH_VEC_ELT (decl_list, ix, ntp)
	{
	  if (ntp->t == last)
	    continue;
	  fprintf (file, "%d: ", ntp->num);
	  print_generic_decl (file, ntp->t, flags);
	  fprintf (file, "\n");
	  last = ntp->t;
	}
    }
}