xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-dfa.c (revision f3cfa6f6ce31685c6c4a758bc430e69eb99f50a4)
1 /* Data flow functions for trees.
2    Copyright (C) 2001-2016 Free Software Foundation, Inc.
3    Contributed by Diego Novillo <dnovillo@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-pretty-print.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "langhooks.h"
34 #include "gimple-iterator.h"
35 #include "gimple-walk.h"
36 #include "tree-dfa.h"
37 
38 /* Build and maintain data flow information for trees.  */
39 
40 /* Counters used to display DFA and SSA statistics.  */
41 struct dfa_stats_d
42 {
43   long num_defs;
44   long num_uses;
45   long num_phis;
46   long num_phi_args;
47   size_t max_num_phi_args;
48   long num_vdefs;
49   long num_vuses;
50 };
51 
52 
53 /* Local functions.  */
54 static void collect_dfa_stats (struct dfa_stats_d *);
55 
56 
57 /*---------------------------------------------------------------------------
58 			Dataflow analysis (DFA) routines
59 ---------------------------------------------------------------------------*/
60 
61 /* Renumber all of the gimple stmt uids.  */
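/* A typical use (illustrative sketch, not code from this file): a pass that
   wants per-statement data in a flat array can renumber the uids and then
   index by gimple_uid, e.g.

       renumber_gimple_stmt_uids ();
       int *cost = XCNEWVEC (int, gimple_stmt_max_uid (cfun));
       ...
       cost[gimple_uid (stmt)] = compute_cost (stmt);

   "cost" and "compute_cost" are hypothetical; the uid accessors are the
   real ones declared in gimple.h.  */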
62 
63 void
64 renumber_gimple_stmt_uids (void)
65 {
66   basic_block bb;
67 
68   set_gimple_stmt_max_uid (cfun, 0);
69   FOR_ALL_BB_FN (bb, cfun)
70     {
71       gimple_stmt_iterator bsi;
72       for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
73 	{
74 	  gimple *stmt = gsi_stmt (bsi);
75 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
76 	}
77       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
78 	{
79 	  gimple *stmt = gsi_stmt (bsi);
80 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
81 	}
82     }
83 }
84 
85 /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
86    in BLOCKS, of which there are N_BLOCKS.  Also renumbers PHIs.  */
87 
88 void
89 renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
90 {
91   int i;
92 
93   set_gimple_stmt_max_uid (cfun, 0);
94   for (i = 0; i < n_blocks; i++)
95     {
96       basic_block bb = blocks[i];
97       gimple_stmt_iterator bsi;
98       for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
99 	{
100 	  gimple *stmt = gsi_stmt (bsi);
101 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
102 	}
103       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
104 	{
105 	  gimple *stmt = gsi_stmt (bsi);
106 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
107 	}
108     }
109 }
110 
111 
112 
113 /*---------------------------------------------------------------------------
114 			      Debugging functions
115 ---------------------------------------------------------------------------*/
116 
117 /* Dump variable VAR and its may-aliases to FILE.  */
118 
119 void
120 dump_variable (FILE *file, tree var)
121 {
122   if (TREE_CODE (var) == SSA_NAME)
123     {
124       if (POINTER_TYPE_P (TREE_TYPE (var)))
125 	dump_points_to_info_for (file, var);
126       var = SSA_NAME_VAR (var);
127     }
128 
129   if (var == NULL_TREE)
130     {
131       fprintf (file, "<nil>");
132       return;
133     }
134 
135   print_generic_expr (file, var, dump_flags);
136 
137   fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
138   if (DECL_PT_UID (var) != DECL_UID (var))
139     fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));
140 
141   fprintf (file, ", ");
142   print_generic_expr (file, TREE_TYPE (var), dump_flags);
143 
144   if (TREE_ADDRESSABLE (var))
145     fprintf (file, ", is addressable");
146 
147   if (is_global_var (var))
148     fprintf (file, ", is global");
149 
150   if (TREE_THIS_VOLATILE (var))
151     fprintf (file, ", is volatile");
152 
153   if (cfun && ssa_default_def (cfun, var))
154     {
155       fprintf (file, ", default def: ");
156       print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
157     }
158 
159   if (DECL_INITIAL (var))
160     {
161       fprintf (file, ", initial: ");
162       print_generic_expr (file, DECL_INITIAL (var), dump_flags);
163     }
164 
165   fprintf (file, "\n");
166 }
167 
168 
169 /* Dump variable VAR and its may-aliases to stderr.  */
170 
171 DEBUG_FUNCTION void
172 debug_variable (tree var)
173 {
174   dump_variable (stderr, var);
175 }
176 
177 
178 /* Dump various DFA statistics to FILE.  */
179 
180 void
181 dump_dfa_stats (FILE *file)
182 {
183   struct dfa_stats_d dfa_stats;
184 
185   unsigned long size, total = 0;
186   const char * const fmt_str   = "%-30s%-13s%12s\n";
187   const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
188   const char * const fmt_str_3 = "%-43s%11lu%c\n";
189   const char *funcname
190     = lang_hooks.decl_printable_name (current_function_decl, 2);
191 
192   collect_dfa_stats (&dfa_stats);
193 
194   fprintf (file, "\nDFA Statistics for %s\n\n", funcname);
195 
196   fprintf (file, "---------------------------------------------------------\n");
197   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
198   fprintf (file, fmt_str, "", "  instances  ", "used ");
199   fprintf (file, "---------------------------------------------------------\n");
200 
201   size = dfa_stats.num_uses * sizeof (tree *);
202   total += size;
203   fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
204 	   SCALE (size), LABEL (size));
205 
206   size = dfa_stats.num_defs * sizeof (tree *);
207   total += size;
208   fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
209 	   SCALE (size), LABEL (size));
210 
211   size = dfa_stats.num_vuses * sizeof (tree *);
212   total += size;
213   fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
214 	   SCALE (size), LABEL (size));
215 
216   size = dfa_stats.num_vdefs * sizeof (tree *);
217   total += size;
218   fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
219 	   SCALE (size), LABEL (size));
220 
221   size = dfa_stats.num_phis * sizeof (struct gphi);
222   total += size;
223   fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
224 	   SCALE (size), LABEL (size));
225 
226   size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
227   total += size;
228   fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
229  	   SCALE (size), LABEL (size));
230 
231   fprintf (file, "---------------------------------------------------------\n");
232   fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total),
233 	   LABEL (total));
234   fprintf (file, "---------------------------------------------------------\n");
235   fprintf (file, "\n");
236 
237   if (dfa_stats.num_phis)
238     fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
239 	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
240 	     (long) dfa_stats.max_num_phi_args);
241 
242   fprintf (file, "\n");
243 }
244 
245 
246 /* Dump DFA statistics on stderr.  */
247 
248 DEBUG_FUNCTION void
249 debug_dfa_stats (void)
250 {
251   dump_dfa_stats (stderr);
252 }
253 
254 
255 /* Collect DFA statistics and store them in the structure pointed to by
256    DFA_STATS_P.  */
257 
258 static void
259 collect_dfa_stats (struct dfa_stats_d *dfa_stats_p)
260 {
261   basic_block bb;
262 
263   gcc_assert (dfa_stats_p);
264 
265   memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
266 
267   /* Walk all the statements in the function counting references.  */
268   FOR_EACH_BB_FN (bb, cfun)
269     {
270       for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
271 	   gsi_next (&si))
272 	{
273 	  gphi *phi = si.phi ();
274 	  dfa_stats_p->num_phis++;
275 	  dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
276 	  if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
277 	    dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
278 	}
279 
280       for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
281 	   gsi_next (&si))
282 	{
283 	  gimple *stmt = gsi_stmt (si);
284 	  dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
285 	  dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
286 	  dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
287 	  dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
288 	}
289     }
290 }
291 
292 
293 /*---------------------------------------------------------------------------
294 			     Miscellaneous helpers
295 ---------------------------------------------------------------------------*/
296 
297 /* Look up VAR's UID in the default_defs hashtable of function FN and return
298    the associated SSA_NAME (VAR's default definition), or NULL_TREE.  */
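/* For example (illustrative): for a PARM_DECL "p", the default definition is
   the SSA name that carries p's value on function entry (printed as p_N(D)
   in dumps); ssa_default_def (cfun, p) returns that name, or NULL_TREE if
   none has been created yet.  "p" is a hypothetical parameter.  */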
299 
300 tree
301 ssa_default_def (struct function *fn, tree var)
302 {
303   struct tree_decl_minimal ind;
304   struct tree_ssa_name in;
305   gcc_assert (TREE_CODE (var) == VAR_DECL
306 	      || TREE_CODE (var) == PARM_DECL
307 	      || TREE_CODE (var) == RESULT_DECL);
308   in.var = (tree)&ind;
309   ind.uid = DECL_UID (var);
310   return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
311 }
312 
313 /* Insert the pair VAR's UID, DEF into the default_defs hashtable of
314    function FN.  A NULL DEF removes VAR's current default definition.  */
315 
316 void
317 set_ssa_default_def (struct function *fn, tree var, tree def)
318 {
319   struct tree_decl_minimal ind;
320   struct tree_ssa_name in;
321 
322   gcc_assert (TREE_CODE (var) == VAR_DECL
323 	      || TREE_CODE (var) == PARM_DECL
324 	      || TREE_CODE (var) == RESULT_DECL);
325   in.var = (tree)&ind;
326   ind.uid = DECL_UID (var);
327   if (!def)
328     {
329       tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
330 							  DECL_UID (var),
331 							  NO_INSERT);
332       if (loc)
333 	{
334 	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
335 	  DEFAULT_DEFS (fn)->clear_slot (loc);
336 	}
337       return;
338     }
339   gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
340   tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
341 						      DECL_UID (var), INSERT);
342 
343   /* Default definition might be changed by tail call optimization.  */
344   if (*loc)
345     SSA_NAME_IS_DEFAULT_DEF (*loc) = false;
346 
347    /* Mark DEF as the default definition for VAR.  */
348   *loc = def;
349   SSA_NAME_IS_DEFAULT_DEF (def) = true;
350 }
351 
352 /* Retrieve or create a default definition for VAR.  */
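/* For example (illustrative): a pass that needs the SSA name carrying the
   incoming value of a parameter "parm" can use

       tree entry_val = get_or_create_ssa_default_def (cfun, parm);

   which creates and registers the default definition on first use and
   returns the already registered one afterwards.  "parm" and "entry_val"
   are hypothetical names.  */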
353 
354 tree
355 get_or_create_ssa_default_def (struct function *fn, tree var)
356 {
357   tree ddef = ssa_default_def (fn, var);
358   if (ddef == NULL_TREE)
359     {
360       ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
361       set_ssa_default_def (fn, var, ddef);
362     }
363   return ddef;
364 }
365 
366 
367 /* If EXP is a handled component reference for a structure, return the
368    base object.  The access range is delimited by bit positions *POFFSET and
369    *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
370    *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
371    and *PMAX_SIZE are equal, the access is non-variable.  If *PREVERSE is
372    true, the storage order of the reference is reversed.  */
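/* Worked example (illustrative; assumes a 32-bit int and no padding): for
   the reference s.a[2] with

       struct S { int i; int a[4]; } s;

   the returned base is the decl "s", *POFFSET is 96 (32 bits for "i" plus
   2 * 32 bits into "a") and *PSIZE == *PMAX_SIZE == 32.  For s.a[k] with a
   variable index k, *POFFSET is 32 and *PMAX_SIZE grows to 128, covering
   the whole array.  "s" and "k" are hypothetical.  */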
373 
374 tree
375 get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
376 			 HOST_WIDE_INT *psize,
377 			 HOST_WIDE_INT *pmax_size,
378 			 bool *preverse)
379 {
380   offset_int bitsize = -1;
381   offset_int maxsize;
382   tree size_tree = NULL_TREE;
383   offset_int bit_offset = 0;
384   bool seen_variable_array_ref = false;
385 
386   /* First get the final access size and the storage order from just the
387      outermost expression.  */
388   if (TREE_CODE (exp) == COMPONENT_REF)
389     size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
390   else if (TREE_CODE (exp) == BIT_FIELD_REF)
391     size_tree = TREE_OPERAND (exp, 1);
392   else if (!VOID_TYPE_P (TREE_TYPE (exp)))
393     {
394       machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
395       if (mode == BLKmode)
396 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
397       else
398 	bitsize = int (GET_MODE_BITSIZE (mode));
399     }
400   if (size_tree != NULL_TREE
401       && TREE_CODE (size_tree) == INTEGER_CST)
402     bitsize = wi::to_offset (size_tree);
403 
404   *preverse = reverse_storage_order_for_component_p (exp);
405 
406   /* Initially, maxsize is the same as the accessed element size.
407      In the following it will only grow (or become -1).  */
408   maxsize = bitsize;
409 
410   /* Compute cumulative bit-offset for nested component-refs and array-refs,
411      and find the ultimate containing object.  */
412   while (1)
413     {
414       switch (TREE_CODE (exp))
415 	{
416 	case BIT_FIELD_REF:
417 	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
418 	  break;
419 
420 	case COMPONENT_REF:
421 	  {
422 	    tree field = TREE_OPERAND (exp, 1);
423 	    tree this_offset = component_ref_field_offset (exp);
424 
425 	    if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
426 	      {
427 		offset_int woffset = wi::lshift (wi::to_offset (this_offset),
428 						 LOG2_BITS_PER_UNIT);
429 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
430 		bit_offset += woffset;
431 
432 		/* If we had seen a variable array ref already and we just
433 		   referenced the last field of a struct or a union member
434 		   then we have to adjust maxsize by the padding at the end
435 		   of our field.  */
436 		if (seen_variable_array_ref)
437 		  {
438 		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
439 		    tree next = DECL_CHAIN (field);
440 		    while (next && TREE_CODE (next) != FIELD_DECL)
441 		      next = DECL_CHAIN (next);
442 		    if (!next
443 			|| TREE_CODE (stype) != RECORD_TYPE)
444 		      {
445 			tree fsize = DECL_SIZE_UNIT (field);
446 			tree ssize = TYPE_SIZE_UNIT (stype);
447 			if (fsize == NULL
448 			    || TREE_CODE (fsize) != INTEGER_CST
449 			    || ssize == NULL
450 			    || TREE_CODE (ssize) != INTEGER_CST)
451 			  maxsize = -1;
452 			else if (maxsize != -1)
453 			  {
454 			    offset_int tem = (wi::to_offset (ssize)
455 					      - wi::to_offset (fsize));
456 			    tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
457 			    tem -= woffset;
458 			    maxsize += tem;
459 			  }
460 		      }
461 		    /* A component ref with an adjacent field up in the
462 		       structure hierarchy constrains the size of any variable
463 		       array ref lower in the access hierarchy.  */
464 		    else
465 		      seen_variable_array_ref = false;
466 		  }
467 	      }
468 	    else
469 	      {
470 		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
471 		/* We need to adjust maxsize to the whole structure bitsize.
472 		   But we can subtract any constant offset seen so far,
473 		   because that would get us out of the structure otherwise.  */
474 		if (maxsize != -1
475 		    && csize
476 		    && TREE_CODE (csize) == INTEGER_CST)
477 		  maxsize = wi::to_offset (csize) - bit_offset;
478 		else
479 		  maxsize = -1;
480 	      }
481 	  }
482 	  break;
483 
484 	case ARRAY_REF:
485 	case ARRAY_RANGE_REF:
486 	  {
487 	    tree index = TREE_OPERAND (exp, 1);
488 	    tree low_bound, unit_size;
489 
490 	    /* If the resulting bit-offset is constant, track it.  */
491 	    if (TREE_CODE (index) == INTEGER_CST
492 		&& (low_bound = array_ref_low_bound (exp),
493  		    TREE_CODE (low_bound) == INTEGER_CST)
494 		&& (unit_size = array_ref_element_size (exp),
495 		    TREE_CODE (unit_size) == INTEGER_CST))
496 	      {
497 		offset_int woffset
498 		  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
499 			      TYPE_PRECISION (TREE_TYPE (index)));
500 		woffset *= wi::to_offset (unit_size);
501 		woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
502 		bit_offset += woffset;
503 
504 		/* An array ref with a constant index up in the structure
505 		   hierarchy will constrain the size of any variable array ref
506 		   lower in the access hierarchy.  */
507 		seen_variable_array_ref = false;
508 	      }
509 	    else
510 	      {
511 		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
512 		/* We need to adjust maxsize to the whole array bitsize.
513 		   But we can subtract any constant offset seen so far,
514 		   because that would get us outside of the array otherwise.  */
515 		if (maxsize != -1
516 		    && asize
517 		    && TREE_CODE (asize) == INTEGER_CST)
518 		  maxsize = wi::to_offset (asize) - bit_offset;
519 		else
520 		  maxsize = -1;
521 
522 		/* Remember that we have seen an array ref with a variable
523 		   index.  */
524 		seen_variable_array_ref = true;
525 	      }
526 	  }
527 	  break;
528 
529 	case REALPART_EXPR:
530 	  break;
531 
532 	case IMAGPART_EXPR:
533 	  bit_offset += bitsize;
534 	  break;
535 
536 	case VIEW_CONVERT_EXPR:
537 	  break;
538 
539 	case TARGET_MEM_REF:
540 	  /* Via the variable index or index2 we can reach the
541 	     whole object.  Still hand back the decl here.  */
542 	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
543 	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
544 	    {
545 	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
546 	      bit_offset = 0;
547 	      maxsize = -1;
548 	      goto done;
549 	    }
550 	  /* Fallthru.  */
551 	case MEM_REF:
552 	  /* We need to deal with variable arrays ending structures such as
553 	     struct { int length; int a[1]; } x;           x.a[d]
554 	     struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
555 	     struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
556 	     struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
557 	     where we do not know maxsize for variable index accesses to
558 	     the array.  The simplest way to conservatively deal with this
559 	     is to punt in the case that offset + maxsize reaches the
560 	     base type boundary.  This needs to include possible trailing
561 	     padding that is there for alignment purposes.  */
562 	  if (seen_variable_array_ref
563 	      && maxsize != -1
564 	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
565 		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
566 		  || (bit_offset + maxsize
567 		      == wi::to_offset (TYPE_SIZE (TREE_TYPE (exp))))))
568 	    maxsize = -1;
569 
570 	  /* Hand back the decl for MEM[&decl, off].  */
571 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
572 	    {
573 	      if (integer_zerop (TREE_OPERAND (exp, 1)))
574 		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
575 	      else
576 		{
577 		  offset_int off = mem_ref_offset (exp);
578 		  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
579 		  off += bit_offset;
580 		  if (wi::fits_shwi_p (off))
581 		    {
582 		      bit_offset = off;
583 		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
584 		    }
585 		}
586 	    }
587 	  goto done;
588 
589 	default:
590 	  goto done;
591 	}
592 
593       exp = TREE_OPERAND (exp, 0);
594     }
595 
596  done:
597   if (!wi::fits_shwi_p (bitsize) || wi::neg_p (bitsize))
598     {
599       *poffset = 0;
600       *psize = -1;
601       *pmax_size = -1;
602 
603       return exp;
604     }
605 
606   *psize = bitsize.to_shwi ();
607 
608   if (!wi::fits_shwi_p (bit_offset))
609     {
610       *poffset = 0;
611       *pmax_size = -1;
612 
613       return exp;
614     }
615 
616   /* In case of a decl or constant base object we can do better.  */
617 
618   if (DECL_P (exp))
619     {
620       if (VAR_P (exp)
621 	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
622 	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
623 	{
624 	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
625 	  /* If size is unknown, or we have read to the end, assume there
626 	     may be more to the structure than we are told.  */
627 	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
628 	      || (seen_variable_array_ref
629 		  && (sz_tree == NULL_TREE
630 		      || TREE_CODE (sz_tree) != INTEGER_CST
631 		      || (bit_offset + maxsize == wi::to_offset (sz_tree)))))
632 	    maxsize = -1;
633 	}
634       /* If maxsize is unknown adjust it according to the size of the
635          base decl.  */
636       else if (maxsize == -1
637 	  && DECL_SIZE (exp)
638 	  && TREE_CODE (DECL_SIZE (exp)) == INTEGER_CST)
639 	maxsize = wi::to_offset (DECL_SIZE (exp)) - bit_offset;
640     }
641   else if (CONSTANT_CLASS_P (exp))
642     {
643       /* If maxsize is unknown adjust it according to the size of the
644          base type constant.  */
645       if (maxsize == -1
646 	  && TYPE_SIZE (TREE_TYPE (exp))
647 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
648 	maxsize = (wi::to_offset (TYPE_SIZE (TREE_TYPE (exp)))
649 		   - bit_offset);
650     }
651 
652   /* ???  Due to negative offsets in ARRAY_REF we can end up with
653      negative bit_offset here.  We might want to store a zero offset
654      in this case.  */
655   *poffset = bit_offset.to_shwi ();
656   if (!wi::fits_shwi_p (maxsize) || wi::neg_p (maxsize))
657     *pmax_size = -1;
658   else
659     *pmax_size = maxsize.to_shwi ();
660 
661   return exp;
662 }
663 
664 /* Returns the base object of the memory access EXP and, in *POFFSET, the
665    constant byte (BITS_PER_UNIT) offset of the access within that base.
666    Returns NULL_TREE if the offset is not constant or any component
667    is not BITS_PER_UNIT-aligned.
668    VALUEIZE if non-NULL is used to valueize SSA names.  It should return
669    its argument or a constant if the argument is known to be constant.  */
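/* Worked example (illustrative; assumes a 4-byte int and no padding): with

       struct S { int i; int a[4]; } s;

   get_addr_base_and_unit_offset (s.a[2], &off), the NULL-VALUEIZE wrapper
   below, returns the decl "s" with off == 12 (4 bytes for "i" plus 2 * 4
   bytes into "a").  For a variable index such as s.a[k] it returns
   NULL_TREE, since the byte offset is not constant.  "s", "k" and "off"
   are hypothetical names.  */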
670 
671 tree
672 get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
673 				 tree (*valueize) (tree))
674 {
675   HOST_WIDE_INT byte_offset = 0;
676 
677   /* Compute cumulative byte-offset for nested component-refs and array-refs,
678      and find the ultimate containing object.  */
679   while (1)
680     {
681       switch (TREE_CODE (exp))
682 	{
683 	case BIT_FIELD_REF:
684 	  {
685 	    HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
686 	    if (this_off % BITS_PER_UNIT)
687 	      return NULL_TREE;
688 	    byte_offset += this_off / BITS_PER_UNIT;
689 	  }
690 	  break;
691 
692 	case COMPONENT_REF:
693 	  {
694 	    tree field = TREE_OPERAND (exp, 1);
695 	    tree this_offset = component_ref_field_offset (exp);
696 	    HOST_WIDE_INT hthis_offset;
697 
698 	    if (!this_offset
699 		|| TREE_CODE (this_offset) != INTEGER_CST
700 		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
701 		    % BITS_PER_UNIT))
702 	      return NULL_TREE;
703 
704 	    hthis_offset = TREE_INT_CST_LOW (this_offset);
705 	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
706 			     / BITS_PER_UNIT);
707 	    byte_offset += hthis_offset;
708 	  }
709 	  break;
710 
711 	case ARRAY_REF:
712 	case ARRAY_RANGE_REF:
713 	  {
714 	    tree index = TREE_OPERAND (exp, 1);
715 	    tree low_bound, unit_size;
716 
717 	    if (valueize
718 		&& TREE_CODE (index) == SSA_NAME)
719 	      index = (*valueize) (index);
720 
721 	    /* If the resulting bit-offset is constant, track it.  */
722 	    if (TREE_CODE (index) == INTEGER_CST
723 		&& (low_bound = array_ref_low_bound (exp),
724 		    TREE_CODE (low_bound) == INTEGER_CST)
725 		&& (unit_size = array_ref_element_size (exp),
726 		    TREE_CODE (unit_size) == INTEGER_CST))
727 	      {
728 		offset_int woffset
729 		  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
730 			      TYPE_PRECISION (TREE_TYPE (index)));
731 		woffset *= wi::to_offset (unit_size);
732 		byte_offset += woffset.to_shwi ();
733 	      }
734 	    else
735 	      return NULL_TREE;
736 	  }
737 	  break;
738 
739 	case REALPART_EXPR:
740 	  break;
741 
742 	case IMAGPART_EXPR:
743 	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
744 	  break;
745 
746 	case VIEW_CONVERT_EXPR:
747 	  break;
748 
749 	case MEM_REF:
750 	  {
751 	    tree base = TREE_OPERAND (exp, 0);
752 	    if (valueize
753 		&& TREE_CODE (base) == SSA_NAME)
754 	      base = (*valueize) (base);
755 
756 	    /* Hand back the decl for MEM[&decl, off].  */
757 	    if (TREE_CODE (base) == ADDR_EXPR)
758 	      {
759 		if (!integer_zerop (TREE_OPERAND (exp, 1)))
760 		  {
761 		    offset_int off = mem_ref_offset (exp);
762 		    byte_offset += off.to_short_addr ();
763 		  }
764 		exp = TREE_OPERAND (base, 0);
765 	      }
766 	    goto done;
767 	  }
768 
769 	case TARGET_MEM_REF:
770 	  {
771 	    tree base = TREE_OPERAND (exp, 0);
772 	    if (valueize
773 		&& TREE_CODE (base) == SSA_NAME)
774 	      base = (*valueize) (base);
775 
776 	    /* Hand back the decl for MEM[&decl, off].  */
777 	    if (TREE_CODE (base) == ADDR_EXPR)
778 	      {
779 		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
780 		  return NULL_TREE;
781 		if (!integer_zerop (TMR_OFFSET (exp)))
782 		  {
783 		    offset_int off = mem_ref_offset (exp);
784 		    byte_offset += off.to_short_addr ();
785 		  }
786 		exp = TREE_OPERAND (base, 0);
787 	      }
788 	    goto done;
789 	  }
790 
791 	default:
792 	  goto done;
793 	}
794 
795       exp = TREE_OPERAND (exp, 0);
796     }
797 done:
798 
799   *poffset = byte_offset;
800   return exp;
801 }
802 
803 /* Returns the base object of the memory access EXP and, in *POFFSET, the
804    constant byte (BITS_PER_UNIT) offset of the access within that base.
805    Returns NULL_TREE if the offset is not constant or any component
806    is not BITS_PER_UNIT-aligned.  */
807 
808 tree
809 get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
810 {
811   return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
812 }
813 
814 /* Returns true if STMT references an SSA_NAME that has
815    SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */
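/* (Background, not specific to this function: such SSA names are created for
   values that are live across abnormal edges, e.g. around setjmp or nonlocal
   gotos, and callers typically use this predicate to avoid propagating
   values into such statements.)  */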
816 
817 bool
818 stmt_references_abnormal_ssa_name (gimple *stmt)
819 {
820   ssa_op_iter oi;
821   use_operand_p use_p;
822 
823   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
824     {
825       if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
826 	return true;
827     }
828 
829   return false;
830 }
831 
832 /* Pair of tree and a sorting index, for dump_enumerated_decls.  */
833 struct GTY(()) numbered_tree
834 {
835   tree t;
836   int num;
837 };
838 
839 
840 /* Compare two numbered_tree entries by DECL_UID, then by sequence number.
841    Called via qsort.  */
842 
843 static int
844 compare_decls_by_uid (const void *pa, const void *pb)
845 {
846   const numbered_tree *nt_a = ((const numbered_tree *)pa);
847   const numbered_tree *nt_b = ((const numbered_tree *)pb);
848 
849   if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
850     return  DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
851   return nt_a->num - nt_b->num;
852 }
853 
854 /* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.  */
855 static tree
856 dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
857 {
858   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
859   vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
860   numbered_tree nt;
861 
862   if (!DECL_P (*tp))
863     return NULL_TREE;
864   nt.t = *tp;
865   nt.num = list->length ();
866   list->safe_push (nt);
867   *walk_subtrees = 0;
868   return NULL_TREE;
869 }
870 
871 /* Find all the declarations used by the current function, sort them by uid,
872    and emit the sorted list.  Each declaration is tagged with a sequence
873    number indicating when it was found during statement / tree walking,
874    so that TDF_NOUID comparisons of anonymous declarations are still
875    meaningful.  Where a declaration was encountered more than once, we
876    emit only the sequence number of the first encounter.
877    FILE is the dump file where to output the list and FLAGS is as in
878    print_generic_expr.  */
879 void
880 dump_enumerated_decls (FILE *file, int flags)
881 {
882   basic_block bb;
883   struct walk_stmt_info wi;
884   auto_vec<numbered_tree, 40> decl_list;
885 
886   memset (&wi, '\0', sizeof (wi));
887   wi.info = (void *) &decl_list;
888   FOR_EACH_BB_FN (bb, cfun)
889     {
890       gimple_stmt_iterator gsi;
891 
892       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
893 	if (!is_gimple_debug (gsi_stmt (gsi)))
894 	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
895     }
896   decl_list.qsort (compare_decls_by_uid);
897   if (decl_list.length ())
898     {
899       unsigned ix;
900       numbered_tree *ntp;
901       tree last = NULL_TREE;
902 
903       fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
904 	       current_function_name ());
905       FOR_EACH_VEC_ELT (decl_list, ix, ntp)
906 	{
907 	  if (ntp->t == last)
908 	    continue;
909 	  fprintf (file, "%d: ", ntp->num);
910 	  print_generic_decl (file, ntp->t, flags);
911 	  fprintf (file, "\n");
912 	  last = ntp->t;
913 	}
914     }
915 }
916