/* Liveness for SSA trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "timevar.h"
#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
#include "debug.h"
#include "tree-ssa.h"
#include "lto-streamer.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "cfgloop.h"

#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif


/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMES are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */

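/* As a minimal usage sketch (hypothetical driver code, not part of this
   file), the lifecycle described above typically looks like:

     var_map map = init_var_map (num_ssa_names);
     var_union (map, name1, name2);        -- coalesce two SSA names
     partition_view_normal (map, true);    -- compact, build base vars
     ... query var_to_partition / partition_to_var ...
     delete_var_map (map);

   where name1 and name2 stand for arbitrary non-virtual SSA_NAMEs.  */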

/* Hashtable helpers.  */

struct tree_int_map_hasher : typed_noop_remove <tree_int_map>
{
  typedef tree_int_map value_type;
  typedef tree_int_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline hashval_t
tree_int_map_hasher::hash (const value_type *v)
{
  return tree_map_base_hash (v);
}

inline bool
tree_int_map_hasher::equal (const value_type *v, const compare_type *c)
{
  return tree_int_map_eq (v, c);
}

/* This routine will initialize the basevar fields of MAP.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  hash_table<tree_int_map_hasher> tree_to_index (num_part);
  /* We can have at most num_part entries in the hash table, so it's
     enough to allocate that many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;
      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var)
	  && (!VAR_P (SSA_NAME_VAR (var))
	      || !DECL_IGNORED_P (SSA_NAME_VAR (var))))
	m->base.from = SSA_NAME_VAR (var);
      else
	/* This restricts what anonymous SSA names we can coalesce
	   as it restricts the sets we compute conflicts for.
	   Using TREE_TYPE to generate sets is the easiest, as
	   type equivalency also holds for SSA names with the same
	   underlying decl.

	   Check gimple_can_coalesce_p when changing this code.  */
	m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
			? TYPE_CANONICAL (TREE_TYPE (var))
			: TREE_TYPE (var));
      /* If the base variable hasn't been seen, set it up.  */
      slot = tree_to_index.find_slot (m, INSERT);
      if (!*slot)
	{
	  baseindex = m - mapstorage;
	  m->to = baseindex;
	  *slot = m;
	  m++;
	}
      else
	baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  map->num_basevars = m - mapstorage;

  free (mapstorage);
}


/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}


/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}


/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}


/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}


/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */

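/* A hedged illustration of that recompaction idea (hypothetical calls,
   not code from this file):

     partition_view_bitmap (map, multi_def_parts, false);
     ... coalesce within this reduced view ...
     partition_view_normal (map, true);

   where multi_def_parts is an assumed bitmap of the interesting
   partitions; unions performed under the first view persist in
   MAP->var_partition and so survive into the second view.  */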

/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}


/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}
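
/* A small worked example of such a view: assuming only partitions 2, 5
   and 9 were selected out of 10, the loop above produces

     partition_to_view = { -1, -1, 0, -1, -1, 1, -1, -1, -1, 2 }
     view_to_partition = { 2, 5, 9 }

   so view index 1 names partition 5, and bitmaps over the view need only
   3 bits instead of 10.  */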


/* Create a partition view which includes all the used partitions in MAP.  If
   WANT_BASES is true, create the base variable map as well.  */

void
partition_view_normal (var_map map, bool want_bases)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}


/* Create a partition view in MAP which includes just the partitions which
   occur in the bitmap ONLY.  If WANT_BASES is true, create the base
   variable map as well.  */

void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}


static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non-global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks unused when they can
   possibly be eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Check whether the block is dead (by possibly eliminating its dead
   subblocks) and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead, or there is precisely one subblock and
	the block has the same abstract origin as the outer block and
	declares no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead
   variables out of scope blocks to let them be recycled by GGC and to
   save copying work done by the inliner.  */
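/* An illustrative case for rule 3 above (hypothetical source, not taken
   from this file): in

     { int a;  {  { use (a); }  }  }

   the middle block declares no variables, has exactly one subblock and
   shares its abstract origin, so it is a pure wrapper; it is removed and
   its subblock spliced into the outer block.  */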

static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with BLOCK_ABSTRACT_ORIGIN
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block
	   && BLOCK_ABSTRACT_ORIGIN (scope)
	   && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (scope)) == FUNCTION_DECL)
    {
      in_ctor_dtor_block = false;
      unused = false;
    }

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of a nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if the function
	 will be output to a file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 The exception is scope blocks not containing any instructions
	 at all, so the user can't get into those scopes in the first
	 place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below; however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we can, however, keep
	 around only the used variables for cfgexpand's memory packing,
	 saving quite a lot of memory.

	 For the sake of -g3, we keep around those vars but we don't count
	 this as a use of the block, so an innermost block with no used vars
	 and no instructions can be considered dead.  We only want to keep
	 around blocks the user can breakpoint into and ask about the value
	 of optimized-out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether a given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can always
     be eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
    {
      /* Even for -g0 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for the representation of inlined
     functions.  Inlined functions are always represented by a block with
     block_ultimate_origin set to a FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		== UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

696 
697 /* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
698    eliminated during the tree->rtl conversion process.  */
699 
700 static inline void
701 mark_all_vars_used (tree *expr_p)
702 {
703   walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
704 }
705 
/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple stmt = gsi_stmt (gsi);

	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  gimple_set_block (stmt, NULL);
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "",
	   BLOCK_NUMBER (scope),
	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)" : "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}


/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}


/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  if (is_gimple_debug (stmt))
	    continue;

	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gphi *phi = gpi.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We take a two-pass approach to the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other reference.  So the
     first pass ignores them, and the second pass (if there were any)
     tries to remove them.  */
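  /* For example (a sketch of the intended effect, not real IL from this
     file): if a local aggregate tmp is referenced only by the clobber
     tmp = {CLOBBER};  then pass one leaves tmp unmarked and the loop
     below deletes the clobber, letting tmp itself be removed.  Any other
     reference to tmp keeps the clobber, which in turn marks its scope
     block as used.  */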
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  if (cfun->has_simduid_loops)
    {
      struct loop *loop;
      FOR_EACH_LOOP (loop, 0)
	if (loop->simduid && !is_used_p (loop->simduid))
	  loop->simduid = NULL_TREE;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (TREE_CODE (var) == VAR_DECL
	  && DECL_HARD_REGISTER (var)
	  && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl), false);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);
  live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);

  live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}


/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  if (live->livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
    }
  if (live->liveout)
    {
      bitmap_obstack_release (&live->liveout_obstack);
      free (live->liveout);
    }
  BITMAP_FREE (live->global);
  free (live->work_stack);
  free (live);
}


/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

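  /* Restated as the standard backward dataflow step (a clarifying
     restatement; the names below are just the sets involved): for each
     predecessor P,

       livein (P) |= livein (BB) & ~def (P)

     where def (P), the set of partitions defined in P, is temporarily
     stored in the liveout vector while live on entry is computed.  */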
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      /* Variables live-on-entry from BB that aren't defined in the
	 predecessor block.  These should be the live on entry vars to
	 pred.  Note that liveout is the DEFs in a block while live on
	 entry is being calculated.
	 Add these bits to the live-on-entry set for the pred; if there
	 are any changes and pred_bb has already been visited, add it to
	 the revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
					  loe, &live->liveout[pred_bb->index]);
      if (change
	  && bitmap_bit_p (visited, pred_bb->index))
	{
	  bitmap_clear_bit (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}


/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }

  sbitmap_free (visited);
}


/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in the liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHIs are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether it's live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
	  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	continue;
      else
	{
	  /* If it's not defined in this block, it's live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}


/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live on entry calculations used the liveout vectors for defs; clear
     them.  */
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successor's live-on-entry set to this block's
	 live-on-exit set.  */
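      /* Expressed as an equation (a restatement, using U for set union):

	   liveout (BB) = U livein (S)  over each non-exit successor S,

	 in addition to the PHI argument bits set in the loop above.  */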
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}


/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
	set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}
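
/* A minimal, hedged sketch of how the liveness computed above is
   typically consumed (hypothetical caller, assuming a var_map MAP for a
   function already in SSA form):

     tree_live_info_p live = calculate_live_ranges (map, true);
     bitmap in = live_on_entry (live, bb);
     bitmap out = live_on_exit (live, bb);
     ... build an interference/conflict graph from IN and OUT ...
     delete_tree_live_info (live);

   live_on_entry and live_on_exit are the accessors from
   tree-ssa-live.h.  */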


/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
	p = map->view_to_partition[x];
      else
	p = x;

      if (ssa_name (p) == NULL_TREE
	  || virtual_operand_p (ssa_name (p)))
	continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
	{
	  p = partition_find (map->var_partition, y);
	  if (map->partition_to_view)
	    p = map->partition_to_view[p];
	  if (p == (int)x)
	    {
	      if (t++ == 0)
		{
		  fprintf (f, "Partition %d (", x);
		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
		  fprintf (f, " - ");
		}
	      fprintf (f, "%d ", y);
	    }
	}
      if (t != 0)
	fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}



/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}


/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* An undefined local variable does not need to be very
		   alive.  */
		if (ssa_undefined_value_p (var, false))
		  continue;

		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gphi_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gphi *phi = gsi.phi ();
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  gcc_assert (num <= 0);
}
#endif
1539