1 /* Data flow analysis for GNU compiler.
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2004 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 /* This file contains the data flow analysis pass of the compiler.  It
23    computes data flow information which tells combine_instructions
24    which insns to consider combining and controls register allocation.
25 
26    Additional data flow information that is too bulky to record is
27    generated during the analysis, and is used at that time to create
28    autoincrement and autodecrement addressing.
29 
30    The first step is dividing the function into basic blocks.
31    find_basic_blocks does this.  Then life_analysis determines
32    where each register is live and where it is dead.
33 
34    ** find_basic_blocks **
35 
36    find_basic_blocks divides the current function's rtl into basic
37    blocks and constructs the CFG.  The blocks are recorded in the
38    basic_block_info array; the CFG exists in the edge structures
39    referenced by the blocks.
40 
41    find_basic_blocks also finds any unreachable loops and deletes them.
42 
43    ** life_analysis **
44 
45    life_analysis is called immediately after find_basic_blocks.
46    It uses the basic block information to determine where each
47    hard or pseudo register is live.
48 
49    ** live-register info **
50 
51    The information about where each register is live is in two parts:
52    the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53 
54    basic_block->global_live_at_start is, for each basic block, a
55    bit-vector with a bit for each hard or pseudo register.  The
56    bit is 1 if the register is live at the beginning of that
57    basic block.
58 
59    Two types of elements can be added to an insn's REG_NOTES.
60    A REG_DEAD note is added to an insn's REG_NOTES for any register
61    that meets both of two conditions:  The value in the register is not
62    needed in subsequent insns and the insn does not replace the value in
63    the register (in the case of multi-word hard registers, the value in
64    each register must be replaced by the insn to avoid a REG_DEAD note).
65 
66    In the vast majority of cases, an object in a REG_DEAD note will be
67    used somewhere in the insn.  The (rare) exception to this is if an
68    insn uses a multi-word hard register and only some of the registers are
69    needed in subsequent insns.  In that case, REG_DEAD notes will be
70    provided for those hard registers that are not subsequently needed.
71    Partial REG_DEAD notes of this type do not occur when an insn sets
72    only some of the hard registers used in such a multi-word operand;
73    omitting REG_DEAD notes for objects stored in an insn is optional and
74    the desire to do so does not justify the complexity of the partial
75    REG_DEAD notes.
76 
77    REG_UNUSED notes are added for each register that is set by the insn
78    but is unused subsequently (if every register set by the insn is unused
79    and the insn does not reference memory or have some other side-effect,
80    the insn is deleted instead).  If only part of a multi-word hard
81    register is used in a subsequent insn, REG_UNUSED notes are made for
82    the parts that will not be used.
83 
84    To determine which registers are live after any insn, one can
85    start from the beginning of the basic block and scan insns, noting
86    which registers are set by each insn and which die there.
87 
88    ** Other actions of life_analysis **
89 
90    life_analysis sets up the LOG_LINKS fields of insns because the
91    information needed to do so is readily available.
92 
93    life_analysis deletes insns whose only effect is to store a value
94    that is never used.
95 
96    life_analysis notices cases where a reference to a register as
97    a memory address can be combined with a preceding or following
98    incrementation or decrementation of the register.  The separate
99    instruction to increment or decrement is deleted and the address
100    is changed to a POST_INC or similar rtx.
101 
102    Each time an incrementing or decrementing address is created,
103    a REG_INC element is added to the insn's REG_NOTES list.
104 
105    life_analysis fills in certain vectors containing information about
106    register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107    REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108 
109    life_analysis sets current_function_sp_is_unchanging if the function
110    doesn't modify the stack pointer.  */
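/* An illustrative sketch of the scan described above (our own helper,
   kept under "#if 0" and never compiled into the pass): starting from
   a block's global_live_at_start set, walk forward noting which
   registers are set by each insn and which die there, using the
   REG_DEAD and REG_UNUSED notes.  Multi-word hard registers are
   ignored for brevity.  */
#if 0
static void
live_regs_after_insn_sketch (bb, target, live)
     basic_block bb;
     rtx target;
     regset live;
{
  rtx insn, note, set;

  COPY_REG_SET (live, bb->global_live_at_start);
  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* Registers named in REG_DEAD notes die in this insn.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

	  /* A register written here becomes live, unless a REG_UNUSED
	     note says the stored value is never needed.  */
	  set = single_set (insn);
	  if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
	    SET_REGNO_REG_SET (live, REGNO (SET_DEST (set)));
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_UNUSED)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));
	}
      if (insn == target || insn == bb->end)
	break;
    }
}
#endif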
111 
112 /* TODO:
113 
114    Split out from life_analysis:
115 	- local property discovery (bb->local_live, bb->local_set)
116 	- global property computation
117 	- log links creation
118 	- pre/post modify transformation
119 */
120 
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "expr.h"
137 #include "ssa.h"
138 #include "timevar.h"
139 
140 #include "obstack.h"
141 #include "splay-tree.h"
142 
143 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
144    the stack pointer does not matter.  The value is tested only in
145    functions that have frame pointers.
146    No definition is equivalent to always zero.  */
147 #ifndef EXIT_IGNORE_STACK
148 #define EXIT_IGNORE_STACK 0
149 #endif
150 
151 #ifndef HAVE_epilogue
152 #define HAVE_epilogue 0
153 #endif
154 #ifndef HAVE_prologue
155 #define HAVE_prologue 0
156 #endif
157 #ifndef HAVE_sibcall_epilogue
158 #define HAVE_sibcall_epilogue 0
159 #endif
160 
161 #ifndef LOCAL_REGNO
162 #define LOCAL_REGNO(REGNO)  0
163 #endif
164 #ifndef EPILOGUE_USES
165 #define EPILOGUE_USES(REGNO)  0
166 #endif
167 #ifndef EH_USES
168 #define EH_USES(REGNO)  0
169 #endif
170 
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
174 #endif
175 #endif
176 
177 /* Nonzero if the second flow pass has completed.  */
178 int flow2_completed;
179 
180 /* Maximum register number used in this function, plus one.  */
181 
182 int max_regno;
183 
184 /* Indexed by n, giving various register information */
185 
186 varray_type reg_n_info;
187 
188 /* Size of a regset for the current function,
189    in (1) bytes and (2) elements.  */
190 
191 int regset_bytes;
192 int regset_size;
193 
194 /* Regset of regs live when calls to `setjmp'-like functions happen.  */
195 /* ??? Does this exist only for the setjmp-clobbered warning message?  */
196 
197 regset regs_live_at_setjmp;
198 
199 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
200    that have to go in the same hard reg.
201    The first two regs in the list are a pair, and the next two
202    are another pair, etc.  */
203 rtx regs_may_share;
204 
205 /* Callback that determines if it's ok for a function to have no
206    noreturn attribute.  */
207 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
208 
209 /* Set of registers that may be eliminable.  These are handled specially
210    in updating regs_ever_live.  */
211 
212 static HARD_REG_SET elim_reg_set;
213 
214 /* Holds information for tracking conditional register life information.  */
215 struct reg_cond_life_info
216 {
217   /* A boolean expression of conditions under which a register is dead.  */
218   rtx condition;
219   /* Conditions under which a register is dead at the basic block end.  */
220   rtx orig_condition;
221 
222   /* A boolean expression of conditions under which a register has been
223      stored into.  */
224   rtx stores;
225 
226   /* ??? Could store mask of bytes that are dead, so that we could finally
227      track lifetimes of multi-word registers accessed via subregs.  */
228 };
229 
230 /* For use in communicating between propagate_block and its subroutines.
231    Holds all information needed to compute life and def-use information.  */
232 
233 struct propagate_block_info
234 {
235   /* The basic block we're considering.  */
236   basic_block bb;
237 
238   /* Bit N is set if register N is conditionally or unconditionally live.  */
239   regset reg_live;
240 
241   /* Bit N is set if register N is set this insn.  */
242   regset new_set;
243 
244   /* Element N is the next insn that uses (hard or pseudo) register N
245      within the current basic block; or zero, if there is no such insn.  */
246   rtx *reg_next_use;
247 
248   /* Contains a list of all the MEMs we are tracking for dead store
249      elimination.  */
250   rtx mem_set_list;
251 
252   /* If non-null, record the set of registers set unconditionally in the
253      basic block.  */
254   regset local_set;
255 
256   /* If non-null, record the set of registers set conditionally in the
257      basic block.  */
258   regset cond_local_set;
259 
260 #ifdef HAVE_conditional_execution
261   /* Indexed by register number, holds a reg_cond_life_info for each
262      register that is not unconditionally live or dead.  */
263   splay_tree reg_cond_dead;
264 
265   /* Bit N is set if register N is in an expression in reg_cond_dead.  */
266   regset reg_cond_reg;
267 #endif
268 
269   /* The length of mem_set_list.  */
270   int mem_set_list_len;
271 
272   /* Nonzero if the value of CC0 is live.  */
273   int cc0_live;
274 
275   /* Flags controling the set of information propagate_block collects.  */
276   int flags;
277 };
278 
279 /* Number of dead insns removed.  */
280 static int ndead;
281 
282 /* Maximum length of pbi->mem_set_list before we start dropping
283    new elements on the floor.  */
284 #define MAX_MEM_SET_LIST_LEN	100
285 
286 /* Forward declarations */
287 static int verify_wide_reg_1		PARAMS ((rtx *, void *));
288 static void verify_wide_reg		PARAMS ((int, basic_block));
289 static void verify_local_live_at_start	PARAMS ((regset, basic_block));
290 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
291 static void notice_stack_pointer_modification PARAMS ((rtx));
292 static void mark_reg			PARAMS ((rtx, void *));
293 static void mark_regs_live_at_end	PARAMS ((regset));
294 static int set_phi_alternative_reg      PARAMS ((rtx, int, int, void *));
295 static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
296 static void propagate_block_delete_insn PARAMS ((rtx));
297 static rtx propagate_block_delete_libcall PARAMS ((rtx, rtx));
298 static int insn_dead_p			PARAMS ((struct propagate_block_info *,
299 						 rtx, int, rtx));
300 static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
301 						 rtx, rtx));
302 static void mark_set_regs		PARAMS ((struct propagate_block_info *,
303 						 rtx, rtx));
304 static void mark_set_1			PARAMS ((struct propagate_block_info *,
305 						 enum rtx_code, rtx, rtx,
306 						 rtx, int));
307 static int find_regno_partial		PARAMS ((rtx *, void *));
308 
309 #ifdef HAVE_conditional_execution
310 static int mark_regno_cond_dead		PARAMS ((struct propagate_block_info *,
311 						 int, rtx));
312 static void free_reg_cond_life_info	PARAMS ((splay_tree_value));
313 static int flush_reg_cond_reg_1		PARAMS ((splay_tree_node, void *));
314 static void flush_reg_cond_reg		PARAMS ((struct propagate_block_info *,
315 						 int));
316 static rtx elim_reg_cond		PARAMS ((rtx, unsigned int));
317 static rtx ior_reg_cond			PARAMS ((rtx, rtx, int));
318 static rtx not_reg_cond			PARAMS ((rtx));
319 static rtx and_reg_cond			PARAMS ((rtx, rtx, int));
320 #endif
321 #ifdef AUTO_INC_DEC
322 static void attempt_auto_inc		PARAMS ((struct propagate_block_info *,
323 						 rtx, rtx, rtx, rtx, rtx));
324 static void find_auto_inc		PARAMS ((struct propagate_block_info *,
325 						 rtx, rtx));
326 static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
327 						 rtx));
328 static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
329 #endif
330 static void mark_used_reg		PARAMS ((struct propagate_block_info *,
331 						 rtx, rtx, rtx));
332 static void mark_used_regs		PARAMS ((struct propagate_block_info *,
333 						 rtx, rtx, rtx));
334 void dump_flow_info			PARAMS ((FILE *));
335 void debug_flow_info			PARAMS ((void));
336 static void add_to_mem_set_list		PARAMS ((struct propagate_block_info *,
337 						 rtx));
338 static int invalidate_mems_from_autoinc PARAMS ((rtx *, void *));
339 static void invalidate_mems_from_set	PARAMS ((struct propagate_block_info *,
340 						 rtx));
341 static void clear_log_links		PARAMS ((sbitmap));
342 
343 
344 void
345 check_function_return_warnings ()
346 {
347   if (warn_missing_noreturn
348       && !TREE_THIS_VOLATILE (cfun->decl)
349       && EXIT_BLOCK_PTR->pred == NULL
350       && (lang_missing_noreturn_ok_p
351 	  && !lang_missing_noreturn_ok_p (cfun->decl)))
352     warning ("function might be possible candidate for attribute `noreturn'");
353 
354   /* If we have a path to EXIT, then we do return.  */
355   if (TREE_THIS_VOLATILE (cfun->decl)
356       && EXIT_BLOCK_PTR->pred != NULL)
357     warning ("`noreturn' function does return");
358 
359   /* If the clobber_return_insn appears in some basic block, then we
360      do reach the end without returning a value.  */
361   else if (warn_return_type
362 	   && cfun->x_clobber_return_insn != NULL
363 	   && EXIT_BLOCK_PTR->pred != NULL)
364     {
365       int max_uid = get_max_uid ();
366 
367       /* If clobber_return_insn was excised by jump1, then renumber_insns
368 	 can make max_uid smaller than the number still recorded in our rtx.
369 	 That's fine, since this is a quick way of verifying that the insn
370 	 is no longer in the chain.  */
371       if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
372 	{
373 	  rtx insn;
374 
375 	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
376 	    if (insn == cfun->x_clobber_return_insn)
377 	      {
378 	        warning ("control reaches end of non-void function");
379 		break;
380 	      }
381 	}
382     }
383 }
384 
385 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
386    note associated with the BLOCK.  */
387 
388 rtx
389 first_insn_after_basic_block_note (block)
390      basic_block block;
391 {
392   rtx insn;
393 
394   /* Get the first instruction in the block.  */
395   insn = block->head;
396 
397   if (insn == NULL_RTX)
398     return NULL_RTX;
399   if (GET_CODE (insn) == CODE_LABEL)
400     insn = NEXT_INSN (insn);
401   if (!NOTE_INSN_BASIC_BLOCK_P (insn))
402     abort ();
403 
404   return NEXT_INSN (insn);
405 }
406 
407 /* Perform data flow analysis.
408    F is the first insn of the function; FLAGS is a set of PROP_* flags
409    to be used in accumulating flow info.  */
410 
411 void
412 life_analysis (f, file, flags)
413      rtx f;
414      FILE *file;
415      int flags;
416 {
417 #ifdef ELIMINABLE_REGS
418   int i;
419   static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
420 #endif
421 
422   /* Record which registers will be eliminated.  We use this in
423      mark_used_regs.  */
424 
425   CLEAR_HARD_REG_SET (elim_reg_set);
426 
427 #ifdef ELIMINABLE_REGS
428   for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
429     SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
430 #else
431   SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
432 #endif
433 
434 #ifdef CANNOT_CHANGE_MODE_CLASS
435   init_subregs_of_mode ();
436 #endif
437 
438   if (! optimize)
439     flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
440 
441   /* The post-reload life analysis must have (on a global basis) the same
442      registers live as were computed by reload itself; otherwise the
443      elimination offsets and such may be incorrect.
444 
445      Reload will mark some registers as live even though they do not
446      appear in the rtl.
447 
448      We don't want to create new auto-incs after reload, since they
449      are unlikely to be useful and can cause problems with shared
450      stack slots.  */
451   if (reload_completed)
452     flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
453 
454   /* We want alias analysis information for local dead store elimination.  */
455   if (optimize && (flags & PROP_SCAN_DEAD_STORES))
456     init_alias_analysis ();
457 
458   /* Always remove no-op moves.  Do this before other processing so
459      that we don't have to keep re-scanning them.  */
460   delete_noop_moves (f);
461 
462   /* Some targets can emit simpler epilogues if they know that sp was
463      not ever modified during the function.  After reload, of course,
464      we've already emitted the epilogue so there's no sense searching.  */
465   if (! reload_completed)
466     notice_stack_pointer_modification (f);
467 
468   /* Allocate and zero out data structures that will record the
469      data from lifetime analysis.  */
470   allocate_reg_life_data ();
471   allocate_bb_life_data ();
472 
473   /* Find the set of registers live on function exit.  */
474   mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
475 
476   /* "Update" life info from zero.  It'd be nice to begin the
477      relaxation with just the exit and noreturn blocks, but that set
478      is not immediately handy.  */
479 
480   if (flags & PROP_REG_INFO)
481     memset (regs_ever_live, 0, sizeof (regs_ever_live));
482   update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
483 
484   /* Clean up.  */
485   if (optimize && (flags & PROP_SCAN_DEAD_STORES))
486     end_alias_analysis ();
487 
488   if (file)
489     dump_flow_info (file);
490 
491   free_basic_block_vars (1);
492 
493   /* Removing dead insns should've made jumptables really dead.  */
494   delete_dead_jumptables ();
495 }
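/* A minimal usage sketch (never compiled) showing how a caller such
   as rest_of_compilation might invoke the pass above.  The particular
   flag combination is an assumption chosen for illustration; see the
   PROP_* definitions in basic-block.h for the full set.  */
#if 0
static void
run_life_analysis_sketch ()
{
  /* Compute death notes, log links and register info, and allow
     dead code removal plus CFG simplification along the way.  */
  life_analysis (get_insns (), rtl_dump_file,
		 PROP_DEATH_NOTES | PROP_LOG_LINKS | PROP_REG_INFO
		 | PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE
		 | PROP_ALLOW_CFG_CHANGES);
}
#endif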
496 
497 /* A subroutine of verify_wide_reg, called through for_each_rtx.
498    Search for REGNO.  If found, return 2 if it is not wider than
499    word_mode, or 1 if it is.  */
500 
501 static int
502 verify_wide_reg_1 (px, pregno)
503      rtx *px;
504      void *pregno;
505 {
506   rtx x = *px;
507   unsigned int regno = *(int *) pregno;
508 
509   if (GET_CODE (x) == REG && REGNO (x) == regno)
510     {
511       if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
512 	return 2;
513       return 1;
514     }
515   return 0;
516 }
517 
518 /* A subroutine of verify_local_live_at_start.  Search through insns
519    of BB looking for register REGNO.  */
520 
521 static void
522 verify_wide_reg (regno, bb)
523      int regno;
524      basic_block bb;
525 {
526   rtx head = bb->head, end = bb->end;
527 
528   while (1)
529     {
530       if (INSN_P (head))
531 	{
532 	  int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
533 	  if (r == 1)
534 	    return;
535 	  if (r == 2)
536 	    break;
537 	}
538       if (head == end)
539 	break;
540       head = NEXT_INSN (head);
541     }
542 
543   if (rtl_dump_file)
544     {
545       fprintf (rtl_dump_file, "Register %d died unexpectedly.\n", regno);
546       dump_bb (bb, rtl_dump_file);
547     }
548   abort ();
549 }
550 
551 /* A subroutine of update_life_info.  Verify that there are no untoward
552    changes in live_at_start during a local update.  */
553 
554 static void
555 verify_local_live_at_start (new_live_at_start, bb)
556      regset new_live_at_start;
557      basic_block bb;
558 {
559   if (reload_completed)
560     {
561       /* After reload, there are no pseudos, nor subregs of multi-word
562 	 registers.  The regsets should exactly match.  */
563       if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
564 	{
565 	  if (rtl_dump_file)
566 	    {
567 	      fprintf (rtl_dump_file,
568 		       "live_at_start mismatch in bb %d, aborting\nNew:\n",
569 		       bb->index);
570 	      debug_bitmap_file (rtl_dump_file, new_live_at_start);
571 	      fputs ("Old:\n", rtl_dump_file);
572 	      dump_bb (bb, rtl_dump_file);
573 	    }
574 	  abort ();
575 	}
576     }
577   else
578     {
579       int i;
580 
581       /* Find the set of changed registers.  */
582       XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
583 
584       EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
585 	{
586 	  /* No registers should die.  */
587 	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
588 	    {
589 	      if (rtl_dump_file)
590 		{
591 		  fprintf (rtl_dump_file,
592 			   "Register %d died unexpectedly.\n", i);
593 		  dump_bb (bb, rtl_dump_file);
594 		}
595 	      abort ();
596 	    }
597 
598 	  /* Verify that the now-live register is wider than word_mode.  */
599 	  verify_wide_reg (i, bb);
600 	});
601     }
602 }
603 
604 /* Updates life information starting with the basic blocks set in BLOCKS.
605    If BLOCKS is null, consider it to be the universal set.
606 
607    If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
608    we are only expecting local modifications to basic blocks.  If we find
609    extra registers live at the beginning of a block, then we either killed
610    useful data, or we have a broken split that wants data not provided.
611    If we find registers removed from live_at_start, that means we have
612    a broken peephole that is killing a register it shouldn't.
613 
614    ??? This is not true in one situation -- when a pre-reload splitter
615    generates subregs of a multi-word pseudo, current life analysis will
616    lose the kill.  So we _can_ have a pseudo go live.  How irritating.
617 
618    Including PROP_REG_INFO does not properly refresh regs_ever_live
619    unless the caller resets it to zero.  */
620 
621 int
622 update_life_info (blocks, extent, prop_flags)
623      sbitmap blocks;
624      enum update_life_extent extent;
625      int prop_flags;
626 {
627   regset tmp;
628   regset_head tmp_head;
629   int i;
630   int stabilized_prop_flags = prop_flags;
631   basic_block bb;
632 
633   tmp = INITIALIZE_REG_SET (tmp_head);
634   ndead = 0;
635 
636   timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
637 		? TV_LIFE_UPDATE : TV_LIFE);
638 
639   /* Changes to the CFG are only allowed when
640      doing a global update for the entire CFG.  */
641   if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
642       && (extent == UPDATE_LIFE_LOCAL || blocks))
643     abort ();
644 
645   /* For a global update, we go through the relaxation process again.  */
646   if (extent != UPDATE_LIFE_LOCAL)
647     {
648       for ( ; ; )
649 	{
650 	  int changed = 0;
651 
652 	  calculate_global_regs_live (blocks, blocks,
653 				prop_flags & (PROP_SCAN_DEAD_CODE
654 					      | PROP_SCAN_DEAD_STORES
655 					      | PROP_ALLOW_CFG_CHANGES));
656 
657 	  if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
658 	      != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
659 	    break;
660 
661 	  /* Removing dead code may allow the CFG to be simplified which
662 	     in turn may allow for further dead code detection / removal.  */
663 	  FOR_EACH_BB_REVERSE (bb)
664 	    {
665 	      COPY_REG_SET (tmp, bb->global_live_at_end);
666 	      changed |= propagate_block (bb, tmp, NULL, NULL,
667 				prop_flags & (PROP_SCAN_DEAD_CODE
668 					      | PROP_SCAN_DEAD_STORES
669 					      | PROP_KILL_DEAD_CODE));
670 	    }
671 
672 	  /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
673 	     subsequent propagate_block calls, since removing or acting as
674 	     removing dead code can affect global register liveness, which
675 	     is supposed to be finalized for this call after this loop.  */
676 	  stabilized_prop_flags
677 	    &= ~(PROP_SCAN_DEAD_CODE | PROP_SCAN_DEAD_STORES
678 		 | PROP_KILL_DEAD_CODE);
679 
680 	  if (! changed)
681 	    break;
682 
683 	  /* We repeat regardless of what cleanup_cfg says.  If there were
684 	     instructions deleted above, that might have been only a
685 	     partial improvement (see MAX_MEM_SET_LIST_LEN usage).
686 	     Further improvement may be possible.  */
687 	  cleanup_cfg (CLEANUP_EXPENSIVE);
688 	}
689 
690       /* If asked, remove notes from the blocks we'll update.  */
691       if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
692 	count_or_remove_death_notes (blocks, 1);
693     }
694 
695   /* Clear log links in case we are asked to (re)compute them.  */
696   if (prop_flags & PROP_LOG_LINKS)
697     clear_log_links (blocks);
698 
699   if (blocks)
700     {
701       EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
702 	{
703 	  bb = BASIC_BLOCK (i);
704 
705 	  COPY_REG_SET (tmp, bb->global_live_at_end);
706 	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
707 
708 	  if (extent == UPDATE_LIFE_LOCAL)
709 	    verify_local_live_at_start (tmp, bb);
710 	});
711     }
712   else
713     {
714       FOR_EACH_BB_REVERSE (bb)
715 	{
716 	  COPY_REG_SET (tmp, bb->global_live_at_end);
717 
718 	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
719 
720 	  if (extent == UPDATE_LIFE_LOCAL)
721 	    verify_local_live_at_start (tmp, bb);
722 	}
723     }
724 
725   FREE_REG_SET (tmp);
726 
727   if (prop_flags & PROP_REG_INFO)
728     {
729       /* The only pseudos that are live at the beginning of the function
730 	 are those that were not set anywhere in the function.  local-alloc
731 	 doesn't know how to handle these correctly, so mark them as not
732 	 local to any one basic block.  */
733       EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
734 				 FIRST_PSEUDO_REGISTER, i,
735 				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
736 
737       /* We have a problem with any pseudoreg that lives across the setjmp.
738 	 ANSI says that if a user variable does not change in value between
739 	 the setjmp and the longjmp, then the longjmp preserves it.  This
740 	 includes longjmp from a place where the pseudo appears dead.
741 	 (In principle, the value still exists if it is in scope.)
742 	 If the pseudo goes in a hard reg, some other value may occupy
743 	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
744 	 Conclusion: such a pseudo must not go in a hard reg.  */
745       EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
746 				 FIRST_PSEUDO_REGISTER, i,
747 				 {
748 				   if (regno_reg_rtx[i] != 0)
749 				     {
750 				       REG_LIVE_LENGTH (i) = -1;
751 				       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
752 				     }
753 				 });
754     }
755   timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
756 	       ? TV_LIFE_UPDATE : TV_LIFE);
757   if (ndead && rtl_dump_file)
758     fprintf (rtl_dump_file, "deleted %i dead insns\n", ndead);
759   return ndead;
760 }
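/* A hedged sketch (never compiled) of the UPDATE_LIFE_LOCAL case
   described in the comment above update_life_info: after splitting
   or peepholing insns in a known set of blocks, refresh only their
   death notes.  The wrapper and its argument are hypothetical; only
   the update_life_info call itself is real.  */
#if 0
static void
update_split_blocks_sketch (blocks)
     sbitmap blocks;		/* Bit set for each block we modified.  */
{
  /* Only local changes are expected here, so a register appearing in
     or vanishing from a live_at_start set will trip the checks in
     verify_local_live_at_start and abort.  */
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
}
#endif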
761 
762 /* Update life information in all blocks where BB_DIRTY is set.  */
763 
764 int
765 update_life_info_in_dirty_blocks (extent, prop_flags)
766      enum update_life_extent extent;
767      int prop_flags;
768 {
769   sbitmap update_life_blocks = sbitmap_alloc (last_basic_block);
770   int n = 0;
771   basic_block bb;
772   int retval = 0;
773 
774   sbitmap_zero (update_life_blocks);
775   FOR_EACH_BB (bb)
776     {
777       if (extent == UPDATE_LIFE_LOCAL)
778 	{
779 	  if (bb->flags & BB_DIRTY)
780 	    {
781 	      SET_BIT (update_life_blocks, bb->index);
782 	      n++;
783 	    }
784 	}
785       else
786 	{
787 	  /* ??? Bootstrap with -march=pentium4 fails to terminate
788 	     with only a partial life update.  */
789 	  SET_BIT (update_life_blocks, bb->index);
790 	  if (bb->flags & BB_DIRTY)
791 	    n++;
792 	}
793     }
794 
795   if (n)
796     retval = update_life_info (update_life_blocks, extent, prop_flags);
797 
798   sbitmap_free (update_life_blocks);
799   return retval;
800 }
801 
802 /* Free the variables allocated by find_basic_blocks.
803 
804    KEEP_HEAD_END_P is nonzero if basic_block_info is not to be freed.  */
805 
806 void
807 free_basic_block_vars (keep_head_end_p)
808      int keep_head_end_p;
809 {
810   if (! keep_head_end_p)
811     {
812       if (basic_block_info)
813 	{
814 	  clear_edges ();
815 	  VARRAY_FREE (basic_block_info);
816 	}
817       n_basic_blocks = 0;
818       last_basic_block = 0;
819 
820       ENTRY_BLOCK_PTR->aux = NULL;
821       ENTRY_BLOCK_PTR->global_live_at_end = NULL;
822       EXIT_BLOCK_PTR->aux = NULL;
823       EXIT_BLOCK_PTR->global_live_at_start = NULL;
824     }
825 }
826 
827 /* Delete any insns that copy a register to itself.  */
828 
829 int
830 delete_noop_moves (f)
831      rtx f ATTRIBUTE_UNUSED;
832 {
833   rtx insn, next;
834   basic_block bb;
835   int nnoops = 0;
836 
837   FOR_EACH_BB (bb)
838     {
839       for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
840 	{
841 	  next = NEXT_INSN (insn);
842 	  if (INSN_P (insn) && noop_move_p (insn))
843 	    {
844 	      rtx note;
845 
846 	      /* If we're about to remove the first insn of a libcall
847 		 then move the libcall note to the next real insn and
848 		 update the retval note.  */
849 	      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
850 		       && XEXP (note, 0) != insn)
851 		{
852 		  rtx new_libcall_insn = next_real_insn (insn);
853 		  rtx retval_note = find_reg_note (XEXP (note, 0),
854 						   REG_RETVAL, NULL_RTX);
855 		  REG_NOTES (new_libcall_insn)
856 		    = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
857 					 REG_NOTES (new_libcall_insn));
858 		  XEXP (retval_note, 0) = new_libcall_insn;
859 		}
860 
861 	      delete_insn_and_edges (insn);
862 	      nnoops++;
863 	    }
864 	}
865     }
866   if (nnoops && rtl_dump_file)
867     fprintf (rtl_dump_file, "deleted %i noop moves", nnoops);
868   return nnoops;
869 }
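/* A simplified sketch (never compiled) of the kind of test noop_move_p
   performs for the loop above: a single SET whose source and
   destination are the same register copies a register to itself.  The
   real predicate in rtlanal.c also handles PARALLELs, partial-register
   self-stores and insns carrying libcall-related notes.  */
#if 0
static int
trivial_noop_move_sketch (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  return (set != 0
	  && GET_CODE (SET_DEST (set)) == REG
	  && GET_CODE (SET_SRC (set)) == REG
	  && REGNO (SET_DEST (set)) == REGNO (SET_SRC (set)));
}
#endif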
870 
871 /* Delete any jump tables never referenced.  We can't delete them at the
872    time of removing the tablejump insn, as they are referenced by the preceding
873    insns computing the destination, so we delay deleting them and garbage-collect
874    them once life information is computed.  */
875 void
876 delete_dead_jumptables ()
877 {
878   rtx insn, next;
879   for (insn = get_insns (); insn; insn = next)
880     {
881       next = NEXT_INSN (insn);
882       if (GET_CODE (insn) == CODE_LABEL
883 	  && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
884 	  && GET_CODE (next) == JUMP_INSN
885 	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
886 	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
887 	{
888 	  if (rtl_dump_file)
889 	    fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
890 	  delete_insn (NEXT_INSN (insn));
891 	  delete_insn (insn);
892 	  next = NEXT_INSN (next);
893 	}
894     }
895 }
896 
897 /* Determine if the stack pointer is constant over the life of the function.
898    Only useful before prologues have been emitted.  */
899 
900 static void
901 notice_stack_pointer_modification_1 (x, pat, data)
902      rtx x;
903      rtx pat ATTRIBUTE_UNUSED;
904      void *data ATTRIBUTE_UNUSED;
905 {
906   if (x == stack_pointer_rtx
907       /* The stack pointer is only modified indirectly as the result
908 	 of a push until later in flow.  See the comments in rtl.texi
909 	 regarding Embedded Side-Effects on Addresses.  */
910       || (GET_CODE (x) == MEM
911 	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
912 	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
913     current_function_sp_is_unchanging = 0;
914 }
915 
916 static void
917 notice_stack_pointer_modification (f)
918      rtx f;
919 {
920   rtx insn;
921 
922   /* Assume that the stack pointer is unchanging if alloca hasn't
923      been used.  */
924   current_function_sp_is_unchanging = !current_function_calls_alloca;
925   if (! current_function_sp_is_unchanging)
926     return;
927 
928   for (insn = f; insn; insn = NEXT_INSN (insn))
929     {
930       if (INSN_P (insn))
931 	{
932 	  /* Check if insn modifies the stack pointer.  */
933 	  note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
934 		       NULL);
935 	  if (! current_function_sp_is_unchanging)
936 	    return;
937 	}
938     }
939 }
940 
941 /* Mark a register in SET.  Hard registers in large modes get all
942    of their component registers set as well.  */
943 
944 static void
945 mark_reg (reg, xset)
946      rtx reg;
947      void *xset;
948 {
949   regset set = (regset) xset;
950   int regno = REGNO (reg);
951 
952   if (GET_MODE (reg) == BLKmode)
953     abort ();
954 
955   SET_REGNO_REG_SET (set, regno);
956   if (regno < FIRST_PSEUDO_REGISTER)
957     {
958       int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
959       while (--n > 0)
960 	SET_REGNO_REG_SET (set, regno + n);
961     }
962 }
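/* For example (a sketch, never compiled): on a typical 32-bit target a
   DImode value in hard register 0 occupies two consecutive hard regs,
   so HARD_REGNO_NREGS (0, DImode) == 2 and mark_reg sets both bits.
   The register number and mode are arbitrary illustrations.  */
#if 0
static void
mark_reg_example (set)
     regset set;
{
  /* gen_rtx_REG builds a (reg:DI 0) rtx; both component hard
     registers end up marked in SET.  */
  mark_reg (gen_rtx_REG (DImode, 0), (void *) set);
}
#endif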
963 
964 /* Mark those regs which are needed at the end of the function as live
965    at the end of the last basic block.  */
966 
967 static void
968 mark_regs_live_at_end (set)
969      regset set;
970 {
971   unsigned int i;
972 
973   /* If exiting needs the right stack value, consider the stack pointer
974      live at the end of the function.  */
975   if ((HAVE_epilogue && reload_completed)
976       || ! EXIT_IGNORE_STACK
977       || (! FRAME_POINTER_REQUIRED
978 	  && ! current_function_calls_alloca
979 	  && flag_omit_frame_pointer)
980       || current_function_sp_is_unchanging)
981     {
982       SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
983     }
984 
985   /* Mark the frame pointer if needed at the end of the function.  If
986      we end up eliminating it, it will be removed from the live list
987      of each basic block by reload.  */
988 
989   if (! reload_completed || frame_pointer_needed)
990     {
991       SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
992 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
993       /* If they are different, also mark the hard frame pointer as live.  */
994       if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
995 	SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
996 #endif
997     }
998 
999 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
1000   /* Many architectures have a GP register even without flag_pic.
1001      Assume the pic register is not in use, or will be handled by
1002      other means, if it is not fixed.  */
1003   if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1004       && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1005     SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
1006 #endif
1007 
1008   /* Mark all global registers, and all registers used by the epilogue
1009      as being live at the end of the function since they may be
1010      referenced by our caller.  */
1011   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1012     if (global_regs[i] || EPILOGUE_USES (i))
1013       SET_REGNO_REG_SET (set, i);
1014 
1015   if (HAVE_epilogue && reload_completed)
1016     {
1017       /* Mark all call-saved registers that we actually used.  */
1018       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1019 	if (regs_ever_live[i] && ! LOCAL_REGNO (i)
1020 	    && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1021 	  SET_REGNO_REG_SET (set, i);
1022     }
1023 
1024 #ifdef EH_RETURN_DATA_REGNO
1025   /* Mark the registers that will contain data for the handler.  */
1026   if (reload_completed && current_function_calls_eh_return)
1027     for (i = 0; ; ++i)
1028       {
1029 	unsigned regno = EH_RETURN_DATA_REGNO(i);
1030 	if (regno == INVALID_REGNUM)
1031 	  break;
1032 	SET_REGNO_REG_SET (set, regno);
1033       }
1034 #endif
1035 #ifdef EH_RETURN_STACKADJ_RTX
1036   if ((! HAVE_epilogue || ! reload_completed)
1037       && current_function_calls_eh_return)
1038     {
1039       rtx tmp = EH_RETURN_STACKADJ_RTX;
1040       if (tmp && REG_P (tmp))
1041 	mark_reg (tmp, set);
1042     }
1043 #endif
1044 #ifdef EH_RETURN_HANDLER_RTX
1045   if ((! HAVE_epilogue || ! reload_completed)
1046       && current_function_calls_eh_return)
1047     {
1048       rtx tmp = EH_RETURN_HANDLER_RTX;
1049       if (tmp && REG_P (tmp))
1050 	mark_reg (tmp, set);
1051     }
1052 #endif
1053 
1054   /* Mark function return value.  */
1055   diddle_return_value (mark_reg, set);
1056 }
1057 
1058 /* Callback function for for_each_successor_phi.  DATA is a regset.
1059    Sets the SRC_REGNO, the regno of the phi alternative for phi node
1060    INSN, in the regset.  */
1061 
1062 static int
1063 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
1064      rtx insn ATTRIBUTE_UNUSED;
1065      int dest_regno ATTRIBUTE_UNUSED;
1066      int src_regno;
1067      void *data;
1068 {
1069   regset live = (regset) data;
1070   SET_REGNO_REG_SET (live, src_regno);
1071   return 0;
1072 }
1073 
1074 /* Propagate global life info around the graph of basic blocks.  Begin
1075    considering blocks with their corresponding bit set in BLOCKS_IN.
1076    If BLOCKS_IN is null, consider it the universal set.
1077 
1078    BLOCKS_OUT is set for every block that was changed.  */
1079 
1080 static void
1081 calculate_global_regs_live (blocks_in, blocks_out, flags)
1082      sbitmap blocks_in, blocks_out;
1083      int flags;
1084 {
1085   basic_block *queue, *qhead, *qtail, *qend, bb;
1086   regset tmp, new_live_at_end, invalidated_by_call;
1087   regset_head tmp_head, invalidated_by_call_head;
1088   regset_head new_live_at_end_head;
1089   int i;
1090 
1091   /* Some passes used to forget to clear the aux field of basic blocks, causing
1092      sick behavior here.  */
1093 #ifdef ENABLE_CHECKING
1094   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1095     if (bb->aux)
1096       abort ();
1097 #endif
1098 
1099   tmp = INITIALIZE_REG_SET (tmp_head);
1100   new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
1101   invalidated_by_call = INITIALIZE_REG_SET (invalidated_by_call_head);
1102 
1103   /* Inconveniently, this is only readily available in hard reg set form.  */
1104   for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1105     if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1106       SET_REGNO_REG_SET (invalidated_by_call, i);
1107 
1108   /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
1109      because the `head == tail' style test for an empty queue doesn't
1110      work with a full queue.  */
1111   queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
1112   qtail = queue;
1113   qhead = qend = queue + n_basic_blocks + 2;
1114 
1115   /* Queue the blocks set in the initial mask.  Do this in reverse block
1116      number order so that the first round is more likely to do
1117      useful work.  We use AUX non-null to flag that the block is queued.  */
1118   if (blocks_in)
1119     {
1120       FOR_EACH_BB (bb)
1121 	if (TEST_BIT (blocks_in, bb->index))
1122 	  {
1123 	    *--qhead = bb;
1124 	    bb->aux = bb;
1125 	  }
1126     }
1127   else
1128     {
1129       FOR_EACH_BB (bb)
1130 	{
1131 	  *--qhead = bb;
1132 	  bb->aux = bb;
1133 	}
1134     }
1135 
1136   /* We clean aux when we remove the initially-enqueued bbs, but we
1137      don't enqueue ENTRY and EXIT initially, so clean them upfront and
1138      unconditionally.  */
1139   ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;
1140 
1141   if (blocks_out)
1142     sbitmap_zero (blocks_out);
1143 
1144   /* We work through the queue until there are no more blocks.  What
1145      is live at the end of this block is precisely the union of what
1146      is live at the beginning of all its successors.  So, we set its
1147      GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
1148      for its successors.  Then, we compute GLOBAL_LIVE_AT_START for
1149      this block by walking through the instructions in this block in
1150      reverse order and updating as we go.  If that changed
1151      GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
1152      queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
1153 
1154      We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
1155      never shrinks.  If a register appears in GLOBAL_LIVE_AT_START, it
1156      must either be live at the end of the block, or used within the
1157      block.  In the latter case, it will certainly never disappear
1158      from GLOBAL_LIVE_AT_START.  In the former case, the register
1159      could go away only if it disappeared from GLOBAL_LIVE_AT_START
1160      for one of the successor blocks.  By induction, that cannot
1161      occur.  */
1162   while (qhead != qtail)
1163     {
1164       int rescan, changed;
1165       basic_block bb;
1166       edge e;
1167 
1168       bb = *qhead++;
1169       if (qhead == qend)
1170 	qhead = queue;
1171       bb->aux = NULL;
1172 
1173       /* Begin by propagating live_at_start from the successor blocks.  */
1174       CLEAR_REG_SET (new_live_at_end);
1175 
1176       if (bb->succ)
1177 	for (e = bb->succ; e; e = e->succ_next)
1178 	  {
1179 	    basic_block sb = e->dest;
1180 
1181 	    /* Call-clobbered registers die across exception and
1182 	       call edges.  */
1183 	    /* ??? Abnormal call edges ignored for the moment, as this gets
1184 	       confused by sibling call edges, which crashes reg-stack.  */
1185 	    if (e->flags & EDGE_EH)
1186 	      {
1187 		bitmap_operation (tmp, sb->global_live_at_start,
1188 				  invalidated_by_call, BITMAP_AND_COMPL);
1189 		IOR_REG_SET (new_live_at_end, tmp);
1190 	      }
1191 	    else
1192 	      IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
1193 
1194 	    /* If a target saves one register in another (instead of on
1195 	       the stack) the save register will need to be live for EH.  */
1196 	    if (e->flags & EDGE_EH)
1197 	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1198 		if (EH_USES (i))
1199 		  SET_REGNO_REG_SET (new_live_at_end, i);
1200 	  }
1201       else
1202 	{
1203 	  /* This might be a noreturn function that throws.  And
1204 	     even if it isn't, getting the unwind info right helps
1205 	     debugging.  */
1206 	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1207 	    if (EH_USES (i))
1208 	      SET_REGNO_REG_SET (new_live_at_end, i);
1209 	}
1210 
1211       /* The all-important stack pointer must always be live.  */
1212       SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
1213 
1214       /* Before reload, there are a few registers that must be forced
1215 	 live everywhere -- which might not already be the case for
1216 	 blocks within infinite loops.  */
1217       if (! reload_completed)
1218 	{
1219 	  /* Any reference to any pseudo before reload is a potential
1220 	     reference of the frame pointer.  */
1221 	  SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
1222 
1223 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1224 	  /* Pseudos with argument area equivalences may require
1225 	     reloading via the argument pointer.  */
1226 	  if (fixed_regs[ARG_POINTER_REGNUM])
1227 	    SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
1228 #endif
1229 
1230 	  /* Any constant, or pseudo with constant equivalences, may
1231 	     require reloading from memory using the pic register.  */
1232 	  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1233 	      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1234 	    SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
1235 	}
1236 
1237       /* Regs used in phi nodes are not included in
1238 	 global_live_at_start, since they are live only along a
1239 	 particular edge.  Set those regs that are live because of a
1240 	 phi node alternative corresponding to this particular block.  */
1241       if (in_ssa_form)
1242 	for_each_successor_phi (bb, &set_phi_alternative_reg,
1243 				new_live_at_end);
1244 
1245       if (bb == ENTRY_BLOCK_PTR)
1246 	{
1247 	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1248 	  continue;
1249 	}
1250 
1251       /* On our first pass through this block, we'll go ahead and continue.
1252 	 Recognize first pass by local_set NULL.  On subsequent passes, we
1253 	 get to skip out early if live_at_end wouldn't have changed.  */
1254 
1255       if (bb->local_set == NULL)
1256 	{
1257 	  bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1258 	  bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1259 	  rescan = 1;
1260 	}
1261       else
1262 	{
1263 	  /* If any bits were removed from live_at_end, we'll have to
1264 	     rescan the block.  This wouldn't be necessary if we had
1265 	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
1266 	     local_live is really dependent on live_at_end.  */
1267 	  CLEAR_REG_SET (tmp);
1268 	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
1269 				     new_live_at_end, BITMAP_AND_COMPL);
1270 
1271 	  if (! rescan)
1272 	    {
1273 	      /* If any of the registers in the new live_at_end set are
1274 		 conditionally set in this basic block, we must rescan.
1275 	         This is because conditional lifetimes at the end of the
1276 		 block do not just take the live_at_end set into account,
1277 		 but also the liveness at the start of each successor
1278 		 block.  We can miss changes in those sets if we only
1279 		 compare the new live_at_end against the previous one.  */
1280 	      CLEAR_REG_SET (tmp);
1281 	      rescan = bitmap_operation (tmp, new_live_at_end,
1282 					 bb->cond_local_set, BITMAP_AND);
1283 	    }
1284 
1285 	  if (! rescan)
1286 	    {
1287 	      /* Find the set of changed bits.  Take this opportunity
1288 		 to notice that this set is empty and early out.  */
1289 	      CLEAR_REG_SET (tmp);
1290 	      changed = bitmap_operation (tmp, bb->global_live_at_end,
1291 					  new_live_at_end, BITMAP_XOR);
1292 	      if (! changed)
1293 		continue;
1294 
1295 	      /* If any of the changed bits overlap with local_set,
1296 		 we'll have to rescan the block.  Detect overlap by
1297 		 the AND with ~local_set turning off bits.  */
1298 	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
1299 					 BITMAP_AND_COMPL);
1300 	    }
1301 	}
1302 
1303       /* Let our caller know that BB changed enough to require its
1304 	 death notes updated.  */
1305       if (blocks_out)
1306 	SET_BIT (blocks_out, bb->index);
1307 
1308       if (! rescan)
1309 	{
1310 	  /* Add to live_at_start the set of all registers in
1311 	     new_live_at_end that aren't in the old live_at_end.  */
1312 
1313 	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
1314 			    BITMAP_AND_COMPL);
1315 	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1316 
1317 	  changed = bitmap_operation (bb->global_live_at_start,
1318 				      bb->global_live_at_start,
1319 				      tmp, BITMAP_IOR);
1320 	  if (! changed)
1321 	    continue;
1322 	}
1323       else
1324 	{
1325 	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1326 
1327 	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
1328 	     into live_at_start.  */
1329 	  propagate_block (bb, new_live_at_end, bb->local_set,
1330 			   bb->cond_local_set, flags);
1331 
1332 	  /* If live_at_start didn't change, no need to go farther.  */
1333 	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
1334 	    continue;
1335 
1336 	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
1337 	}
1338 
1339       /* Queue all predecessors of BB so that we may re-examine
1340 	 their live_at_end.  */
1341       for (e = bb->pred; e; e = e->pred_next)
1342 	{
1343 	  basic_block pb = e->src;
1344 	  if (pb->aux == NULL)
1345 	    {
1346 	      *qtail++ = pb;
1347 	      if (qtail == qend)
1348 		qtail = queue;
1349 	      pb->aux = pb;
1350 	    }
1351 	}
1352     }
1353 
1354   FREE_REG_SET (tmp);
1355   FREE_REG_SET (new_live_at_end);
1356   FREE_REG_SET (invalidated_by_call);
1357 
1358   if (blocks_out)
1359     {
1360       EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
1361 	{
1362 	  basic_block bb = BASIC_BLOCK (i);
1363 	  FREE_REG_SET (bb->local_set);
1364 	  FREE_REG_SET (bb->cond_local_set);
1365 	});
1366     }
1367   else
1368     {
1369       FOR_EACH_BB (bb)
1370 	{
1371 	  FREE_REG_SET (bb->local_set);
1372 	  FREE_REG_SET (bb->cond_local_set);
1373 	}
1374     }
1375 
1376   free (queue);
1377 }
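/* A stripped-down sketch (never compiled) of the fixed-point iteration
   implemented above, with the worklist, the conditional-execution
   bookkeeping and the early-out refinements removed.  propagate_block
   and the regset macros are real; the sweep structure below is just
   the textbook backward-liveness iteration.  */
#if 0
static void
liveness_fixed_point_sketch (flags)
     int flags;
{
  basic_block bb;
  edge e;
  int changed = 1;
  regset_head new_head;
  regset new_live_at_end = INITIALIZE_REG_SET (new_head);

  while (changed)
    {
      changed = 0;
      FOR_EACH_BB_REVERSE (bb)
	{
	  /* live_at_end is the union of the successors' live_at_start.  */
	  CLEAR_REG_SET (new_live_at_end);
	  for (e = bb->succ; e; e = e->succ_next)
	    IOR_REG_SET (new_live_at_end, e->dest->global_live_at_start);
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  /* Scan the block backwards, turning (a copy of) live_at_end
	     into live_at_start.  */
	  propagate_block (bb, new_live_at_end, NULL, NULL, flags);
	  if (! REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
	    {
	      COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
	      changed = 1;
	    }
	}
    }

  FREE_REG_SET (new_live_at_end);
}
#endif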
1378 
1379 
1380 /* This structure is used to pass parameters to and from the
1381    function find_regno_partial ().  It is used to pass in the
1382    register number we are looking for, as well as to return any rtx
1383    we find.  */
1384 
1385 typedef struct {
1386   unsigned regno_to_find;
1387   rtx retval;
1388 } find_regno_partial_param;
1389 
1390 
1391 /* Find the rtx for the reg numbers specified in 'data' if it is
1392    part of an expression which only uses part of the register.  Return
1393    it in the structure passed in.  */
1394 static int
1395 find_regno_partial (ptr, data)
1396      rtx *ptr;
1397      void *data;
1398 {
1399   find_regno_partial_param *param = (find_regno_partial_param *)data;
1400   unsigned reg = param->regno_to_find;
1401   param->retval = NULL_RTX;
1402 
1403   if (*ptr == NULL_RTX)
1404     return 0;
1405 
1406   switch (GET_CODE (*ptr))
1407     {
1408     case ZERO_EXTRACT:
1409     case SIGN_EXTRACT:
1410     case STRICT_LOW_PART:
1411       if (GET_CODE (XEXP (*ptr, 0)) == REG && REGNO (XEXP (*ptr, 0)) == reg)
1412 	{
1413 	  param->retval = XEXP (*ptr, 0);
1414 	  return 1;
1415 	}
1416       break;
1417 
1418     case SUBREG:
1419       if (GET_CODE (SUBREG_REG (*ptr)) == REG
1420 	  && REGNO (SUBREG_REG (*ptr)) == reg)
1421 	{
1422 	  param->retval = SUBREG_REG (*ptr);
1423 	  return 1;
1424 	}
1425       break;
1426 
1427     default:
1428       break;
1429     }
1430 
1431   return 0;
1432 }
1433 
1434 /* Process all immediate successors of the entry block looking for pseudo
1435    registers which are live on entry. Find all of those whose first
1436    instance is a partial register reference of some kind, and initialize
1437    them to 0 after the entry block.  This will prevent bit sets within
1438    registers whose value is unknown and which may contain some kind of
1439    sticky bits we don't want.  */
1440 
1441 int
1442 initialize_uninitialized_subregs ()
1443 {
1444   rtx insn;
1445   edge e;
1446   int reg, did_something = 0;
1447   find_regno_partial_param param;
1448 
1449   for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1450     {
1451       basic_block bb = e->dest;
1452       regset map = bb->global_live_at_start;
1453       EXECUTE_IF_SET_IN_REG_SET (map,
1454 				 FIRST_PSEUDO_REGISTER, reg,
1455 	{
1456 	  int uid = REGNO_FIRST_UID (reg);
1457 	  rtx i;
1458 
1459 	  /* Find an insn which mentions the register we are looking for.
1460 	     It's preferable to have an instance of the register's rtl since
1461 	     there may be various flags set which we need to duplicate.
1462 	     If we can't find it, it's probably an automatic whose initial
1463 	     value doesn't matter, or hopefully something we don't care about.  */
1464 	  for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
1465 	    ;
1466 	  if (i != NULL_RTX)
1467 	    {
1468 	      /* Found the insn, now get the REG rtx, if we can.  */
1469 	      param.regno_to_find = reg;
1470 	      for_each_rtx (&i, find_regno_partial, &param);
1471 	      if (param.retval != NULL_RTX)
1472 		{
1473 		  start_sequence ();
1474 		  emit_move_insn (param.retval,
1475 				  CONST0_RTX (GET_MODE (param.retval)));
1476 		  insn = get_insns ();
1477 		  end_sequence ();
1478 		  insert_insn_on_edge (insn, e);
1479 		  did_something = 1;
1480 		}
1481 	    }
1482 	});
1483     }
1484 
1485   if (did_something)
1486     commit_edge_insertions ();
1487   return did_something;
1488 }
1489 
1490 
1491 /* Subroutines of life analysis.  */
1492 
1493 /* Allocate the permanent data structures that represent the results
1494    of life analysis.  Not static since used also for stupid life analysis.  */
1495 
1496 void
1497 allocate_bb_life_data ()
1498 {
1499   basic_block bb;
1500 
1501   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1502     {
1503       bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1504       bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1505     }
1506 
1507   regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1508 }
1509 
1510 void
1511 allocate_reg_life_data ()
1512 {
1513   int i;
1514 
1515   max_regno = max_reg_num ();
1516 
1517   /* Recalculate the register space, in case it has grown.  Old style
1518      vector oriented regsets would set regset_{size,bytes} here also.  */
1519   allocate_reg_info (max_regno, FALSE, FALSE);
1520 
1521   /* Reset all the data we'll collect in propagate_block and its
1522      subroutines.  */
1523   for (i = 0; i < max_regno; i++)
1524     {
1525       REG_N_SETS (i) = 0;
1526       REG_N_REFS (i) = 0;
1527       REG_N_DEATHS (i) = 0;
1528       REG_N_CALLS_CROSSED (i) = 0;
1529       REG_LIVE_LENGTH (i) = 0;
1530       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1531     }
1532 }
1533 
1534 /* Delete dead instructions for propagate_block.  */
1535 
1536 static void
1537 propagate_block_delete_insn (insn)
1538      rtx insn;
1539 {
1540   rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
1541 
1542   /* If the insn referred to a label, and that label was attached to
1543      an ADDR_VEC, it's safe to delete the ADDR_VEC.  In fact, it's
1544      pretty much mandatory to delete it, because the ADDR_VEC may be
1545      referencing labels that no longer exist.
1546 
1547      INSN may reference a deleted label, particularly when a jump
1548      table has been optimized into a direct jump.  There's no
1549      real good way to fix up the reference to the deleted label
1550      when the label is deleted, so we just allow it here.  */
1551 
1552   if (inote && GET_CODE (inote) == CODE_LABEL)
1553     {
1554       rtx label = XEXP (inote, 0);
1555       rtx next;
1556 
1557       /* The label may be forced if it has been put in the constant
1558 	 pool.  If that is the only use we must discard the table
1559 	 jump following it, but not the label itself.  */
1560       if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
1561 	  && (next = next_nonnote_insn (label)) != NULL
1562 	  && GET_CODE (next) == JUMP_INSN
1563 	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
1564 	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1565 	{
1566 	  rtx pat = PATTERN (next);
1567 	  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1568 	  int len = XVECLEN (pat, diff_vec_p);
1569 	  int i;
1570 
1571 	  for (i = 0; i < len; i++)
1572 	    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
1573 
1574 	  delete_insn_and_edges (next);
1575 	  ndead++;
1576 	}
1577     }
1578 
1579   delete_insn_and_edges (insn);
1580   ndead++;
1581 }
1582 
1583 /* Delete dead libcalls for propagate_block.  Return the insn
1584    before the libcall.  */
1585 
1586 static rtx
1587 propagate_block_delete_libcall (insn, note)
1588      rtx insn, note;
1589 {
1590   rtx first = XEXP (note, 0);
1591   rtx before = PREV_INSN (first);
1592 
1593   delete_insn_chain_and_edges (first, insn);
1594   ndead++;
1595   return before;
1596 }
1597 
1598 /* Update the life-status of regs for one insn.  Return the previous insn.  */
1599 
1600 rtx
1601 propagate_one_insn (pbi, insn)
1602      struct propagate_block_info *pbi;
1603      rtx insn;
1604 {
1605   rtx prev = PREV_INSN (insn);
1606   int flags = pbi->flags;
1607   int insn_is_dead = 0;
1608   int libcall_is_dead = 0;
1609   rtx note;
1610   int i;
1611 
1612   if (! INSN_P (insn))
1613     return prev;
1614 
1615   note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1616   if (flags & PROP_SCAN_DEAD_CODE)
1617     {
1618       insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
1619       libcall_is_dead = (insn_is_dead && note != 0
1620 			 && libcall_dead_p (pbi, note, insn));
1621     }
1622 
1623   /* If an instruction consists of just dead store(s) on the final pass,
1624      delete it.  */
1625   if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
1626     {
1627       /* If we're trying to delete a prologue or epilogue instruction
1628 	 that isn't flagged as possibly being dead, something is wrong.
1629 	 But if we are keeping the stack pointer depressed, we might well
1630 	 be deleting insns that are used to compute the amount to update
1631 	 it by, so they are fine.  */
1632       if (reload_completed
1633 	  && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1634 		&& (TYPE_RETURNS_STACK_DEPRESSED
1635 		    (TREE_TYPE (current_function_decl))))
1636 	  && (((HAVE_epilogue || HAVE_prologue)
1637 	       && prologue_epilogue_contains (insn))
1638 	      || (HAVE_sibcall_epilogue
1639 		  && sibcall_epilogue_contains (insn)))
1640 	  && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
1641 	fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);
1642 
1643       /* Record sets.  Do this even for dead instructions, since they
1644 	 would have killed the values if they hadn't been deleted.  */
1645       mark_set_regs (pbi, PATTERN (insn), insn);
1646 
1647       /* CC0 is now known to be dead.  Either this insn used it,
1648 	 in which case it doesn't anymore, or clobbered it,
1649 	 so the next insn can't use it.  */
1650       pbi->cc0_live = 0;
1651 
1652       if (libcall_is_dead)
1653 	prev = propagate_block_delete_libcall (insn, note);
1654       else
1655 	{
1656 
1657 	  /* If INSN contains a RETVAL note and is dead, but the libcall
1658 	     as a whole is not dead, then we want to remove INSN, but
1659 	     not the whole libcall sequence.
1660 
1661 	     However, we need to also remove the dangling REG_LIBCALL
1662 	     note so that we do not have mis-matched LIBCALL/RETVAL
1663 	     notes.  In theory we could find a new location for the
1664 	     REG_RETVAL note, but it hardly seems worth the effort.
1665 
1666 	     NOTE at this point will be the RETVAL note if it exists.  */
1667 	  if (note)
1668 	    {
1669 	      rtx libcall_note;
1670 
1671 	      libcall_note
1672 		= find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
1673 	      remove_note (XEXP (note, 0), libcall_note);
1674 	    }
1675 
1676 	  /* Similarly if INSN contains a LIBCALL note, remove the
1677 	     dangling REG_RETVAL note.  */
1678 	  note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
1679 	  if (note)
1680 	    {
1681 	      rtx retval_note;
1682 
1683 	      retval_note
1684 		= find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
1685 	      remove_note (XEXP (note, 0), retval_note);
1686 	    }
1687 
1688 	  /* Now delete INSN.  */
1689 	  propagate_block_delete_insn (insn);
1690 	}
1691 
1692       return prev;
1693     }
1694 
1695   /* See if this is an increment or decrement that can be merged into
1696      a following memory address.  */
1697 #ifdef AUTO_INC_DEC
1698   {
1699     rtx x = single_set (insn);
1700 
1701     /* Does this instruction increment or decrement a register?  */
1702     if ((flags & PROP_AUTOINC)
1703 	&& x != 0
1704 	&& GET_CODE (SET_DEST (x)) == REG
1705 	&& (GET_CODE (SET_SRC (x)) == PLUS
1706 	    || GET_CODE (SET_SRC (x)) == MINUS)
1707 	&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
1708 	&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1709 	/* Ok, look for a following memory ref we can combine with.
1710 	   If one is found, change the memory ref to a PRE_INC
1711 	   or PRE_DEC, cancel this insn, and return 1.
1712 	   Return 0 if nothing has been done.  */
1713 	&& try_pre_increment_1 (pbi, insn))
1714       return prev;
1715   }
1716 #endif /* AUTO_INC_DEC */
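
  /* Hedged illustration of the transformation above (operands invented,
     and it assumes a target with PRE_DEC addressing): the pair

		(set (reg 100) (plus (reg 100) (const_int -4)))
		(set (mem:SI (reg 100)) (reg:SI 101))

     can be rewritten by try_pre_increment_1 into the single insn

		(set (mem:SI (pre_dec:SI (reg 100))) (reg:SI 101))

     after which the increment insn is cancelled and PREV is returned.  */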
1717 
1718   CLEAR_REG_SET (pbi->new_set);
1719 
1720   /* If this is not the final pass, and this insn is copying the value of
1721      a library call and it's dead, don't scan the insns that perform the
1722      library call, so that the call's arguments are not marked live.  */
1723   if (libcall_is_dead)
1724     {
1725       /* Record the death of the dest reg.  */
1726       mark_set_regs (pbi, PATTERN (insn), insn);
1727 
1728       insn = XEXP (note, 0);
1729       return PREV_INSN (insn);
1730     }
1731   else if (GET_CODE (PATTERN (insn)) == SET
1732 	   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1733 	   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1734 	   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1735 	   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1736     /* We have an insn to pop a constant amount off the stack.
1737        (Such insns use PLUS regardless of the direction of the stack,
1738        and any insn to adjust the stack by a constant is always a pop.)
1739        These insns, if not dead stores, have no effect on life, though
1740        they do have an effect on the memory stores we are tracking.  */
1741     invalidate_mems_from_set (pbi, stack_pointer_rtx);
1742   else
1743     {
1744       rtx note;
1745       /* Any regs live at the time of a call instruction must not go
1746 	 in a register clobbered by calls.  Find all regs now live and
1747 	 record this for them.  */
1748 
1749       if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
1750 	EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1751 				   { REG_N_CALLS_CROSSED (i)++; });
1752 
1753       /* Record sets.  Do this even for dead instructions, since they
1754 	 would have killed the values if they hadn't been deleted.  */
1755       mark_set_regs (pbi, PATTERN (insn), insn);
1756 
1757       if (GET_CODE (insn) == CALL_INSN)
1758 	{
1759 	  regset live_at_end;
1760 	  bool sibcall_p;
1761 	  rtx note, cond;
1762 	  int i;
1763 
1764 	  cond = NULL_RTX;
1765 	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1766 	    cond = COND_EXEC_TEST (PATTERN (insn));
1767 
1768 	  /* Non-constant calls clobber memory; constant calls do not
1769 	     clobber memory, though they may clobber outgoing arguments
1770 	     on the stack.  */
1771 	  if (! CONST_OR_PURE_CALL_P (insn))
1772 	    {
1773 	      free_EXPR_LIST_list (&pbi->mem_set_list);
1774 	      pbi->mem_set_list_len = 0;
1775 	    }
1776 	  else
1777 	    invalidate_mems_from_set (pbi, stack_pointer_rtx);
1778 
1779 	  /* There may be extra registers to be clobbered.  */
1780 	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
1781 	       note;
1782 	       note = XEXP (note, 1))
1783 	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1784 	      mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
1785 			  cond, insn, pbi->flags);
1786 
1787 	  /* Calls change all call-used and global registers; sibcalls do not
1788 	     clobber anything that must be preserved at end-of-function,
1789 	     except for return values.  */
1790 
1791 	  sibcall_p = SIBLING_CALL_P (insn);
1792 	  live_at_end = EXIT_BLOCK_PTR->global_live_at_start;
1793 	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1794 	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
1795 		&& ! (sibcall_p
1796 		      && REGNO_REG_SET_P (live_at_end, i)
1797 		      && ! refers_to_regno_p (i, i+1,
1798 					      current_function_return_rtx,
1799 					      (rtx *) 0)))
1800 	      {
1801 		enum rtx_code code = global_regs[i] ? SET : CLOBBER;
1802 		/* We do not want REG_UNUSED notes for these registers.  */
1803 		mark_set_1 (pbi, code, regno_reg_rtx[i], cond, insn,
1804 			    pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
1805 	      }
1806 	}
1807 
1808       /* If an insn doesn't use CC0, it becomes dead since we assume
1809 	 that every insn clobbers it.  So show it dead here;
1810 	 mark_used_regs will set it live if it is referenced.  */
1811       pbi->cc0_live = 0;
1812 
1813       /* Record uses.  */
1814       if (! insn_is_dead)
1815 	mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
1816       if ((flags & PROP_EQUAL_NOTES)
1817 	  && ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))
1818 	      || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX))))
1819 	mark_used_regs (pbi, XEXP (note, 0), NULL_RTX, insn);
1820 
1821       /* Sometimes we may have inserted something before INSN (such as a move)
1822 	 when we make an auto-inc.  So ensure we will scan those insns.  */
1823 #ifdef AUTO_INC_DEC
1824       prev = PREV_INSN (insn);
1825 #endif
1826 
1827       if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
1828 	{
1829 	  int i;
1830 	  rtx note, cond;
1831 
1832 	  cond = NULL_RTX;
1833 	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1834 	    cond = COND_EXEC_TEST (PATTERN (insn));
1835 
1836 	  /* Calls use their arguments.  */
1837 	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
1838 	       note;
1839 	       note = XEXP (note, 1))
1840 	    if (GET_CODE (XEXP (note, 0)) == USE)
1841 	      mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
1842 			      cond, insn);
1843 
1844 	  /* The stack ptr is used (honorarily) by a CALL insn.  */
1845 	  SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
1846 
1847 	  /* Calls may also reference any of the global registers,
1848 	     so they are made live.  */
1849 	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1850 	    if (global_regs[i])
1851 	      mark_used_reg (pbi, regno_reg_rtx[i], cond, insn);
1852 	}
1853     }
1854 
1855   /* On final pass, update counts of how many insns in which each reg
1856      is live.  */
1857   if (flags & PROP_REG_INFO)
1858     EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1859 			       { REG_LIVE_LENGTH (i)++; });
1860 
1861   return prev;
1862 }
1863 
1864 /* Initialize a propagate_block_info struct for public consumption.
1865    Note that the structure itself is opaque to this file, but that
1866    the user can use the regsets provided here.  */
1867 
1868 struct propagate_block_info *
1869 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
1870      basic_block bb;
1871      regset live, local_set, cond_local_set;
1872      int flags;
1873 {
1874   struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
1875 
1876   pbi->bb = bb;
1877   pbi->reg_live = live;
1878   pbi->mem_set_list = NULL_RTX;
1879   pbi->mem_set_list_len = 0;
1880   pbi->local_set = local_set;
1881   pbi->cond_local_set = cond_local_set;
1882   pbi->cc0_live = 0;
1883   pbi->flags = flags;
1884 
1885   if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
1886     pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
1887   else
1888     pbi->reg_next_use = NULL;
1889 
1890   pbi->new_set = BITMAP_XMALLOC ();
1891 
1892 #ifdef HAVE_conditional_execution
1893   pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
1894 				       free_reg_cond_life_info);
1895   pbi->reg_cond_reg = BITMAP_XMALLOC ();
1896 
1897   /* If this block ends in a conditional branch, for each register live
1898      from one side of the branch and not the other, record the register
1899      as conditionally dead.  */
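
  /* Illustration only (register numbers invented): if this block ends in
     a jump taken when (ne (reg 99) (const_int 0)) holds, and (reg 42) is
     live at the start of the branch target but not at the start of the
     fall-through block, then (reg 42) is entered in reg_cond_dead with
     (eq (reg 99) (const_int 0)) recorded as the condition under which it
     is dead.  */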
1900   if (GET_CODE (bb->end) == JUMP_INSN
1901       && any_condjump_p (bb->end))
1902     {
1903       regset_head diff_head;
1904       regset diff = INITIALIZE_REG_SET (diff_head);
1905       basic_block bb_true, bb_false;
1906       rtx cond_true, cond_false, set_src;
1907       int i;
1908 
1909       /* Identify the successor blocks.  */
1910       bb_true = bb->succ->dest;
1911       if (bb->succ->succ_next != NULL)
1912 	{
1913 	  bb_false = bb->succ->succ_next->dest;
1914 
1915 	  if (bb->succ->flags & EDGE_FALLTHRU)
1916 	    {
1917 	      basic_block t = bb_false;
1918 	      bb_false = bb_true;
1919 	      bb_true = t;
1920 	    }
1921 	  else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
1922 	    abort ();
1923 	}
1924       else
1925 	{
1926 	  /* This can happen with a conditional jump to the next insn.  */
1927 	  if (JUMP_LABEL (bb->end) != bb_true->head)
1928 	    abort ();
1929 
1930 	  /* Simplest way to do nothing.  */
1931 	  bb_false = bb_true;
1932 	}
1933 
1934       /* Extract the condition from the branch.  */
1935       set_src = SET_SRC (pc_set (bb->end));
1936       cond_true = XEXP (set_src, 0);
1937       cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
1938 				   GET_MODE (cond_true), XEXP (cond_true, 0),
1939 				   XEXP (cond_true, 1));
1940       if (GET_CODE (XEXP (set_src, 1)) == PC)
1941 	{
1942 	  rtx t = cond_false;
1943 	  cond_false = cond_true;
1944 	  cond_true = t;
1945 	}
1946 
1947       /* Compute which registers lead different lives in the successors.  */
1948       if (bitmap_operation (diff, bb_true->global_live_at_start,
1949 			    bb_false->global_live_at_start, BITMAP_XOR))
1950 	{
1951 	  rtx reg = XEXP (cond_true, 0);
1952 
1953 	  if (GET_CODE (reg) == SUBREG)
1954 	    reg = SUBREG_REG (reg);
1955 
1956 	  if (GET_CODE (reg) != REG)
1957 	    abort ();
1958 
1959 	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
1960 
1961 	  /* For each such register, mark it conditionally dead.  */
1962 	  EXECUTE_IF_SET_IN_REG_SET
1963 	    (diff, 0, i,
1964 	     {
1965 	       struct reg_cond_life_info *rcli;
1966 	       rtx cond;
1967 
1968 	       rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
1969 
1970 	       if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
1971 		 cond = cond_false;
1972 	       else
1973 		 cond = cond_true;
1974 	       rcli->condition = cond;
1975 	       rcli->stores = const0_rtx;
1976 	       rcli->orig_condition = cond;
1977 
1978 	       splay_tree_insert (pbi->reg_cond_dead, i,
1979 				  (splay_tree_value) rcli);
1980 	     });
1981 	}
1982 
1983       FREE_REG_SET (diff);
1984     }
1985 #endif
1986 
1987   /* If this block has no successors, any stores to the frame that aren't
1988      used later in the block are dead.  So make a pass over the block
1989      recording any such that are made and show them dead at the end.  We do
1990      a very conservative and simple job here.  */
1991   if (optimize
1992       && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1993 	    && (TYPE_RETURNS_STACK_DEPRESSED
1994 		(TREE_TYPE (current_function_decl))))
1995       && (flags & PROP_SCAN_DEAD_STORES)
1996       && (bb->succ == NULL
1997 	  || (bb->succ->succ_next == NULL
1998 	      && bb->succ->dest == EXIT_BLOCK_PTR
1999 	      && ! current_function_calls_eh_return)))
2000     {
2001       rtx insn, set;
2002       for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
2003 	if (GET_CODE (insn) == INSN
2004 	    && (set = single_set (insn))
2005 	    && GET_CODE (SET_DEST (set)) == MEM)
2006 	  {
2007 	    rtx mem = SET_DEST (set);
2008 	    rtx canon_mem = canon_rtx (mem);
2009 
2010 	    /* This optimization is performed by faking a store to the
2011 	       memory at the end of the block.  This doesn't work for
2012 	       unchanging memories because multiple stores to unchanging
2013 	       memory are illegal and alias analysis doesn't consider it.  */
2014 	    if (RTX_UNCHANGING_P (canon_mem))
2015 	      continue;
2016 
2017 	    if (XEXP (canon_mem, 0) == frame_pointer_rtx
2018 		|| (GET_CODE (XEXP (canon_mem, 0)) == PLUS
2019 		    && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
2020 		    && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
2021 	      add_to_mem_set_list (pbi, canon_mem);
2022 	  }
2023     }
2024 
2025   return pbi;
2026 }
2027 
2028 /* Release a propagate_block_info struct.  */
2029 
2030 void
2031 free_propagate_block_info (pbi)
2032      struct propagate_block_info *pbi;
2033 {
2034   free_EXPR_LIST_list (&pbi->mem_set_list);
2035 
2036   BITMAP_XFREE (pbi->new_set);
2037 
2038 #ifdef HAVE_conditional_execution
2039   splay_tree_delete (pbi->reg_cond_dead);
2040   BITMAP_XFREE (pbi->reg_cond_reg);
2041 #endif
2042 
2043   if (pbi->reg_next_use)
2044     free (pbi->reg_next_use);
2045 
2046   free (pbi);
2047 }
2048 
2049 /* Compute the registers live at the beginning of a basic block BB from
2050    those live at the end.
2051 
2052    When called, REG_LIVE contains those live at the end.  On return, it
2053    contains those live at the beginning.
2054 
2055    LOCAL_SET, if non-null, will be set with all registers killed
2056    unconditionally by this basic block.
2057    Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
2058    killed conditionally by this basic block.  If there is any unconditional
2059    set of a register, then the corresponding bit will be set in LOCAL_SET
2060    and cleared in COND_LOCAL_SET.
2061    It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set.  In this
2062    case, the resulting set will be equal to the union of the two sets that
2063    would otherwise be computed.
2064 
2065    Return nonzero if an INSN is deleted (i.e. by dead code removal).  */
2066 
2067 int
2068 propagate_block (bb, live, local_set, cond_local_set, flags)
2069      basic_block bb;
2070      regset live;
2071      regset local_set;
2072      regset cond_local_set;
2073      int flags;
2074 {
2075   struct propagate_block_info *pbi;
2076   rtx insn, prev;
2077   int changed;
2078 
2079   pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
2080 
2081   if (flags & PROP_REG_INFO)
2082     {
2083       int i;
2084 
2085       /* Process the regs live at the end of the block.
2086 	 Mark them as not local to any one basic block.  */
2087       EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
2088 				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2089     }
2090 
2091   /* Scan the block an insn at a time from end to beginning.  */
2092 
2093   changed = 0;
2094   for (insn = bb->end;; insn = prev)
2095     {
2096       /* If this is a call to `setjmp' et al, warn if any
2097 	 non-volatile datum is live.  */
2098       if ((flags & PROP_REG_INFO)
2099 	  && GET_CODE (insn) == CALL_INSN
2100 	  && find_reg_note (insn, REG_SETJMP, NULL))
2101 	IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2102 
2103       prev = propagate_one_insn (pbi, insn);
2104       changed |= NEXT_INSN (prev) != insn;
2105 
2106       if (insn == bb->head)
2107 	break;
2108     }
2109 
2110   free_propagate_block_info (pbi);
2111 
2112   return changed;
2113 }
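
/* Usage sketch (hedged; the regset name and flag combination here are
   only illustrative): a caller typically seeds LIVE with the registers
   live at the end of the block and lets propagate_block turn that into
   the live-at-start set, e.g.

	COPY_REG_SET (tmp, bb->global_live_at_end);
	changed |= propagate_block (bb, tmp, NULL, NULL,
				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE);

   after which TMP holds the registers live at bb->head.  */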
2114 
2115 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2116    (SET expressions whose destinations are registers dead after the insn).
2117    NEEDED is the regset that says which regs are alive after the insn.
2118 
2119    Unless CALL_OK is nonzero, an insn is needed if it contains a CALL.
2120 
2121    If X is the entire body of an insn, NOTES contains the reg notes
2122    pertaining to the insn.  */
2123 
2124 static int
2125 insn_dead_p (pbi, x, call_ok, notes)
2126      struct propagate_block_info *pbi;
2127      rtx x;
2128      int call_ok;
2129      rtx notes ATTRIBUTE_UNUSED;
2130 {
2131   enum rtx_code code = GET_CODE (x);
2132 
2133   /* Don't eliminate insns that may trap.  */
2134   if (flag_non_call_exceptions && may_trap_p (x))
2135     return 0;
2136 
2137 #ifdef AUTO_INC_DEC
2138   /* As flow is invoked after combine, we must take existing AUTO_INC
2139      expressions into account.  */
2140   for (; notes; notes = XEXP (notes, 1))
2141     {
2142       if (REG_NOTE_KIND (notes) == REG_INC)
2143 	{
2144 	  int regno = REGNO (XEXP (notes, 0));
2145 
2146 	  /* Don't delete insns to set global regs.  */
2147 	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2148 	      || REGNO_REG_SET_P (pbi->reg_live, regno))
2149 	    return 0;
2150 	}
2151     }
2152 #endif
2153 
2154   /* If setting something that's a reg or part of one,
2155      see if that register's altered value will be live.  */
2156 
2157   if (code == SET)
2158     {
2159       rtx r = SET_DEST (x);
2160 
2161 #ifdef HAVE_cc0
2162       if (GET_CODE (r) == CC0)
2163 	return ! pbi->cc0_live;
2164 #endif
2165 
2166       /* A SET that is a subroutine call cannot be dead.  */
2167       if (GET_CODE (SET_SRC (x)) == CALL)
2168 	{
2169 	  if (! call_ok)
2170 	    return 0;
2171 	}
2172 
2173       /* Don't eliminate loads from volatile memory or volatile asms.  */
2174       else if (volatile_refs_p (SET_SRC (x)))
2175 	return 0;
2176 
2177       if (GET_CODE (r) == MEM)
2178 	{
2179 	  rtx temp, canon_r;
2180 
2181 	  if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
2182 	    return 0;
2183 
2184 	  canon_r = canon_rtx (r);
2185 
2186 	  /* Walk the set of memory locations we are currently tracking
2187 	     and see if one is an identical match to this memory location.
2188 	     If so, this memory write is dead (remember, we're walking
2189 	     backwards from the end of the block to the start).  Since
2190 	     rtx_equal_p does not check the alias set or flags, we also
2191 	     must have the potential for them to conflict (anti_dependence).  */
2192 	  for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
2193 	    if (anti_dependence (r, XEXP (temp, 0)))
2194 	      {
2195 		rtx mem = XEXP (temp, 0);
2196 
2197 		if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
2198 		    && (GET_MODE_SIZE (GET_MODE (canon_r))
2199 			<= GET_MODE_SIZE (GET_MODE (mem))))
2200 		  return 1;
2201 
2202 #ifdef AUTO_INC_DEC
2203 		/* Check if memory reference matches an auto increment. Only
2204 		   post increment/decrement or modify are valid.  */
2205 		if (GET_MODE (mem) == GET_MODE (r)
2206 		    && (GET_CODE (XEXP (mem, 0)) == POST_DEC
2207 			|| GET_CODE (XEXP (mem, 0)) == POST_INC
2208 			|| GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
2209 		    && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
2210 		    && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
2211 		  return 1;
2212 #endif
2213 	      }
2214 	}
2215       else
2216 	{
2217 	  while (GET_CODE (r) == SUBREG
2218 		 || GET_CODE (r) == STRICT_LOW_PART
2219 		 || GET_CODE (r) == ZERO_EXTRACT)
2220 	    r = XEXP (r, 0);
2221 
2222 	  if (GET_CODE (r) == REG)
2223 	    {
2224 	      int regno = REGNO (r);
2225 
2226 	      /* Obvious.  */
2227 	      if (REGNO_REG_SET_P (pbi->reg_live, regno))
2228 		return 0;
2229 
2230 	      /* If this is a hard register, verify that subsequent
2231 		 words are not needed.  */
2232 	      if (regno < FIRST_PSEUDO_REGISTER)
2233 		{
2234 		  int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2235 
2236 		  while (--n > 0)
2237 		    if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
2238 		      return 0;
2239 		}
2240 
2241 	      /* Don't delete insns to set global regs.  */
2242 	      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2243 		return 0;
2244 
2245 	      /* Make sure insns to set the stack pointer aren't deleted.  */
2246 	      if (regno == STACK_POINTER_REGNUM)
2247 		return 0;
2248 
2249 	      /* ??? These bits might be redundant with the force live bits
2250 		 in calculate_global_regs_live.  We would delete from
2251 		 sequential sets; whether this actually affects real code
2252 		 for anything but the stack pointer I don't know.  */
2253 	      /* Make sure insns to set the frame pointer aren't deleted.  */
2254 	      if (regno == FRAME_POINTER_REGNUM
2255 		  && (! reload_completed || frame_pointer_needed))
2256 		return 0;
2257 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2258 	      if (regno == HARD_FRAME_POINTER_REGNUM
2259 		  && (! reload_completed || frame_pointer_needed))
2260 		return 0;
2261 #endif
2262 
2263 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2264 	      /* Make sure insns to set arg pointer are never deleted
2265 		 (if the arg pointer isn't fixed, there will be a USE
2266 		 for it, so we can treat it normally).  */
2267 	      if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2268 		return 0;
2269 #endif
2270 
2271 	      /* Otherwise, the set is dead.  */
2272 	      return 1;
2273 	    }
2274 	}
2275     }
2276 
2277   /* If performing several activities, insn is dead if each activity
2278      is individually dead.  Also, CLOBBERs and USEs can be ignored; a
2279      CLOBBER or USE that's inside a PARALLEL doesn't make the insn
2280      worth keeping.  */
2281   else if (code == PARALLEL)
2282     {
2283       int i = XVECLEN (x, 0);
2284 
2285       for (i--; i >= 0; i--)
2286 	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2287 	    && GET_CODE (XVECEXP (x, 0, i)) != USE
2288 	    && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
2289 	  return 0;
2290 
2291       return 1;
2292     }
2293 
2294   /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
2295      is not necessarily true for hard registers until after reload.  */
2296   else if (code == CLOBBER)
2297     {
2298       if (GET_CODE (XEXP (x, 0)) == REG
2299 	  && (REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2300 	      || reload_completed)
2301 	  && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
2302 	return 1;
2303     }
2304 
2305   /* ??? A base USE is a historical relic.  It ought not be needed anymore.
2306      Instances where it is still used are either (1) temporary and the USE
2307      escaped the pass, (2) cruft and the USE need not be emitted anymore,
2308      or (3) hiding bugs elsewhere that are not properly representing data
2309      flow.  */
2310 
2311   return 0;
2312 }
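
/* Worked example for the MEM case above (addresses invented, and it
   assumes dead-store tracking is active, i.e. PROP_SCAN_DEAD_STORES is
   set and FLOW_DEAD_STORES_BROKEN_P is not defined): given

	(set (mem:SI (plus (reg fp) (const_int -8))) (reg 100))
	... no intervening read or aliasing store ...
	(set (mem:SI (plus (reg fp) (const_int -8))) (reg 101))

   the block is scanned backwards, so the second store is already on
   pbi->mem_set_list when the first one is examined; the address and
   mode-size comparison above then reports the first store dead.  */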
2313 
2314 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
2315    return 1 if the entire library call is dead.
2316    This is true if INSN copies a register (hard or pseudo)
2317    and if the hard return reg of the call insn is dead.
2318    (The caller should have tested the destination of the SET inside
2319    INSN already for death.)
2320 
2321    If this insn doesn't just copy a register, then we don't
2322    have an ordinary libcall.  In that case, cse could not have
2323    managed to substitute the source for the dest later on,
2324    so we can assume the libcall is dead.
2325 
2326    PBI is the block info giving pseudoregs live before this insn.
2327    NOTE is the REG_RETVAL note of the insn.  */
2328 
2329 static int
2330 libcall_dead_p (pbi, note, insn)
2331      struct propagate_block_info *pbi;
2332      rtx note;
2333      rtx insn;
2334 {
2335   rtx x = single_set (insn);
2336 
2337   if (x)
2338     {
2339       rtx r = SET_SRC (x);
2340 
2341       if (GET_CODE (r) == REG)
2342 	{
2343 	  rtx call = XEXP (note, 0);
2344 	  rtx call_pat;
2345 	  int i;
2346 
2347 	  /* Find the call insn.  */
2348 	  while (call != insn && GET_CODE (call) != CALL_INSN)
2349 	    call = NEXT_INSN (call);
2350 
2351 	  /* If there is none, do nothing special,
2352 	     since ordinary death handling can understand these insns.  */
2353 	  if (call == insn)
2354 	    return 0;
2355 
2356 	  /* See if the hard reg holding the value is dead.
2357 	     If this is a PARALLEL, find the call within it.  */
2358 	  call_pat = PATTERN (call);
2359 	  if (GET_CODE (call_pat) == PARALLEL)
2360 	    {
2361 	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
2362 		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
2363 		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
2364 		  break;
2365 
2366 	      /* This may be a library call that is returning a value
2367 		 via invisible pointer.  Do nothing special, since
2368 		 ordinary death handling can understand these insns.  */
2369 	      if (i < 0)
2370 		return 0;
2371 
2372 	      call_pat = XVECEXP (call_pat, 0, i);
2373 	    }
2374 
2375 	  return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
2376 	}
2377     }
2378   return 1;
2379 }
2380 
2381 /* Return 1 if register REGNO was used before it was set, i.e. if it is
2382    live at function entry.  Don't count global register variables, variables
2383    in registers that can be used for function arg passing, or variables in
2384    fixed hard registers.  */
2385 
2386 int
2387 regno_uninitialized (regno)
2388      unsigned int regno;
2389 {
2390   if (n_basic_blocks == 0
2391       || (regno < FIRST_PSEUDO_REGISTER
2392 	  && (global_regs[regno]
2393 	      || fixed_regs[regno]
2394 	      || FUNCTION_ARG_REGNO_P (regno))))
2395     return 0;
2396 
2397   return REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno);
2398 }
2399 
2400 /* 1 if register REGNO was alive at a place where `setjmp' was called
2401    and was set more than once or is an argument.
2402    Such regs may be clobbered by `longjmp'.  */
2403 
2404 int
2405 regno_clobbered_at_setjmp (regno)
2406      int regno;
2407 {
2408   if (n_basic_blocks == 0)
2409     return 0;
2410 
2411   return ((REG_N_SETS (regno) > 1
2412 	   || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno))
2413 	  && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2414 }
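
/* C-level illustration (not from this file): given

	jmp_buf buf;
	int
	f (void)
	{
	  int x = 1;
	  if (setjmp (buf))
	    return x;
	  x = 2;
	  g ();			(g may call longjmp (buf, 1))
	  return 0;
	}

   x is live across the setjmp call and set more than once, so if it ends
   up in a call-clobbered register its value at the second return from
   setjmp is unpredictable.  regno_clobbered_at_setjmp reports exactly
   this situation so that callers can warn about or spill such regs.  */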
2415 
2416 /* Add MEM to PBI->MEM_SET_LIST.  MEM should be canonical.  Respect the
2417    maximal list size; look for overlaps in mode and select the largest.  */
2418 static void
2419 add_to_mem_set_list (pbi, mem)
2420      struct propagate_block_info *pbi;
2421      rtx mem;
2422 {
2423   rtx i;
2424 
2425   /* We don't know how large a BLKmode store is, so we must not
2426      take them into consideration.  */
2427   if (GET_MODE (mem) == BLKmode)
2428     return;
2429 
2430   for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
2431     {
2432       rtx e = XEXP (i, 0);
2433       if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
2434 	{
2435 	  if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
2436 	    {
2437 #ifdef AUTO_INC_DEC
2438 	      /* If we must store a copy of the mem, we can just modify
2439 		 the mode of the stored copy.  */
2440 	      if (pbi->flags & PROP_AUTOINC)
2441 	        PUT_MODE (e, GET_MODE (mem));
2442 	      else
2443 #endif
2444 	        XEXP (i, 0) = mem;
2445 	    }
2446 	  return;
2447 	}
2448     }
2449 
2450   if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
2451     {
2452 #ifdef AUTO_INC_DEC
2453       /* Store a copy of mem, otherwise the address may be
2454 	 scrogged by find_auto_inc.  */
2455       if (pbi->flags & PROP_AUTOINC)
2456 	mem = shallow_copy_rtx (mem);
2457 #endif
2458       pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
2459       pbi->mem_set_list_len++;
2460     }
2461 }
2462 
2463 /* INSN references memory, possibly using autoincrement addressing modes.
2464    Find any entries on the mem_set_list that need to be invalidated due
2465    to an address change.  */
2466 
2467 static int
2468 invalidate_mems_from_autoinc (px, data)
2469      rtx *px;
2470      void *data;
2471 {
2472   rtx x = *px;
2473   struct propagate_block_info *pbi = data;
2474 
2475   if (GET_RTX_CLASS (GET_CODE (x)) == 'a')
2476     {
2477       invalidate_mems_from_set (pbi, XEXP (x, 0));
2478       return -1;
2479     }
2480 
2481   return 0;
2482 }
2483 
2484 /* EXP is a REG.  Remove any dependent entries from pbi->mem_set_list.  */
2485 
2486 static void
2487 invalidate_mems_from_set (pbi, exp)
2488      struct propagate_block_info *pbi;
2489      rtx exp;
2490 {
2491   rtx temp = pbi->mem_set_list;
2492   rtx prev = NULL_RTX;
2493   rtx next;
2494 
2495   while (temp)
2496     {
2497       next = XEXP (temp, 1);
2498       if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
2499 	{
2500 	  /* Splice this entry out of the list.  */
2501 	  if (prev)
2502 	    XEXP (prev, 1) = next;
2503 	  else
2504 	    pbi->mem_set_list = next;
2505 	  free_EXPR_LIST_node (temp);
2506 	  pbi->mem_set_list_len--;
2507 	}
2508       else
2509 	prev = temp;
2510       temp = next;
2511     }
2512 }
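
/* Example (operands invented): after a set of (reg 100), a tracked entry
   such as (mem:SI (plus (reg 100) (const_int 8))) can no longer be
   trusted, because the address it names has changed;
   reg_overlap_mentioned_p detects the overlap and the loop above splices
   the entry out of pbi->mem_set_list.  */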
2513 
2514 /* Process the registers that are set within X.  Their bits are set to
2515    1 in the regset DEAD, because they are dead prior to this insn.
2516 
2517    If INSN is nonzero, it is the insn being processed.
2518 
2519    FLAGS is the set of operations to perform.  */
2520 
2521 static void
2522 mark_set_regs (pbi, x, insn)
2523      struct propagate_block_info *pbi;
2524      rtx x, insn;
2525 {
2526   rtx cond = NULL_RTX;
2527   rtx link;
2528   enum rtx_code code;
2529 
2530   if (insn)
2531     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2532       {
2533 	if (REG_NOTE_KIND (link) == REG_INC)
2534 	  mark_set_1 (pbi, SET, XEXP (link, 0),
2535 		      (GET_CODE (x) == COND_EXEC
2536 		       ? COND_EXEC_TEST (x) : NULL_RTX),
2537 		      insn, pbi->flags);
2538       }
2539  retry:
2540   switch (code = GET_CODE (x))
2541     {
2542     case SET:
2543     case CLOBBER:
2544       mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
2545       return;
2546 
2547     case COND_EXEC:
2548       cond = COND_EXEC_TEST (x);
2549       x = COND_EXEC_CODE (x);
2550       goto retry;
2551 
2552     case PARALLEL:
2553       {
2554 	int i;
2555 
2556 	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2557 	  {
2558 	    rtx sub = XVECEXP (x, 0, i);
2559 	    switch (code = GET_CODE (sub))
2560 	      {
2561 	      case COND_EXEC:
2562 		if (cond != NULL_RTX)
2563 		  abort ();
2564 
2565 		cond = COND_EXEC_TEST (sub);
2566 		sub = COND_EXEC_CODE (sub);
2567 		if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
2568 		  break;
2569 		/* Fall through.  */
2570 
2571 	      case SET:
2572 	      case CLOBBER:
2573 		mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
2574 		break;
2575 
2576 	      default:
2577 		break;
2578 	      }
2579 	  }
2580 	break;
2581       }
2582 
2583     default:
2584       break;
2585     }
2586 }
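
/* Illustration (operands invented): for a predicated set such as

	(cond_exec (ne (reg 96) (const_int 0))
		   (set (reg 101) (reg 102)))

   mark_set_regs reaches mark_set_1 with CODE = SET, REG = (reg 101) and
   COND = (ne (reg 96) (const_int 0)), so the store is treated as a
   conditional kill rather than an unconditional one.  */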
2587 
2588 /* Process a single set, which appears in INSN.  REG (which may not
2589    actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
2590    being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
2591    If the set is conditional (because it appears in a COND_EXEC), COND
2592    will be the condition.  */
2593 
2594 static void
2595 mark_set_1 (pbi, code, reg, cond, insn, flags)
2596      struct propagate_block_info *pbi;
2597      enum rtx_code code;
2598      rtx reg, cond, insn;
2599      int flags;
2600 {
2601   int regno_first = -1, regno_last = -1;
2602   unsigned long not_dead = 0;
2603   int i;
2604 
2605   /* Modifying just one hardware register of a multi-reg value or just a
2606      byte field of a register does not mean the value from before this insn
2607      is now dead.  Of course, if it was dead after this insn, it's unused now.  */
2608 
2609   switch (GET_CODE (reg))
2610     {
2611     case PARALLEL:
2612       /* Some targets place small structures in registers for return values of
2613 	 functions.  We have to detect this case specially here to get correct
2614 	 flow information.  */
2615       for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2616 	if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
2617 	  mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
2618 		      flags);
2619       return;
2620 
2621     case ZERO_EXTRACT:
2622     case SIGN_EXTRACT:
2623     case STRICT_LOW_PART:
2624       /* ??? Assumes STRICT_LOW_PART not used on multi-word registers.  */
2625       do
2626 	reg = XEXP (reg, 0);
2627       while (GET_CODE (reg) == SUBREG
2628 	     || GET_CODE (reg) == ZERO_EXTRACT
2629 	     || GET_CODE (reg) == SIGN_EXTRACT
2630 	     || GET_CODE (reg) == STRICT_LOW_PART);
2631       if (GET_CODE (reg) == MEM)
2632 	break;
2633       not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
2634       /* Fall through.  */
2635 
2636     case REG:
2637       regno_last = regno_first = REGNO (reg);
2638       if (regno_first < FIRST_PSEUDO_REGISTER)
2639 	regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
2640       break;
2641 
2642     case SUBREG:
2643       if (GET_CODE (SUBREG_REG (reg)) == REG)
2644 	{
2645 	  enum machine_mode outer_mode = GET_MODE (reg);
2646 	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
2647 
2648 	  /* Identify the range of registers affected.  This is moderately
2649 	     tricky for hard registers.  See alter_subreg.  */
2650 
2651 	  regno_last = regno_first = REGNO (SUBREG_REG (reg));
2652 	  if (regno_first < FIRST_PSEUDO_REGISTER)
2653 	    {
2654 	      regno_first += subreg_regno_offset (regno_first, inner_mode,
2655 						  SUBREG_BYTE (reg),
2656 						  outer_mode);
2657 	      regno_last = (regno_first
2658 			    + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
2659 
2660 	      /* Since we've just adjusted the register number ranges, make
2661 		 sure REG matches.  Otherwise some_was_live will be clear
2662 		 when it shouldn't have been, and we'll create incorrect
2663 		 REG_UNUSED notes.  */
2664 	      reg = gen_rtx_REG (outer_mode, regno_first);
2665 	    }
2666 	  else
2667 	    {
2668 	      /* If the number of words in the subreg is less than the number
2669 		 of words in the full register, we have a well-defined partial
2670 		 set.  Otherwise the high bits are undefined.
2671 
2672 		 This is only really applicable to pseudos, since we just took
2673 		 care of multi-word hard registers.  */
2674 	      if (((GET_MODE_SIZE (outer_mode)
2675 		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
2676 		  < ((GET_MODE_SIZE (inner_mode)
2677 		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
2678 		not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
2679 							    regno_first);
2680 
2681 	      reg = SUBREG_REG (reg);
2682 	    }
2683 	}
2684       else
2685 	reg = SUBREG_REG (reg);
2686       break;
2687 
2688     default:
2689       break;
2690     }
2691 
2692   /* If this set is a MEM, then it kills any aliased writes.
2693      If this set is a REG, then it kills any MEMs which use the reg.  */
2694   if (optimize && (flags & PROP_SCAN_DEAD_STORES))
2695     {
2696       if (GET_CODE (reg) == REG)
2697 	invalidate_mems_from_set (pbi, reg);
2698 
2699       /* If the memory reference had embedded side effects (autoincrement
2700 	 address modes), then we may need to kill some entries on the
2701 	 memory set list.  */
2702       if (insn && GET_CODE (reg) == MEM)
2703 	for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
2704 
2705 #ifndef FLOW_DEAD_STORES_BROKEN_P
2706       if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2707 	  /* ??? With more effort we could track conditional memory life.  */
2708 	  && ! cond)
2709 	add_to_mem_set_list (pbi, canon_rtx (reg));
2710 #endif
2711     }
2712 
2713   if (GET_CODE (reg) == REG
2714       && ! (regno_first == FRAME_POINTER_REGNUM
2715 	    && (! reload_completed || frame_pointer_needed))
2716 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2717       && ! (regno_first == HARD_FRAME_POINTER_REGNUM
2718 	    && (! reload_completed || frame_pointer_needed))
2719 #endif
2720 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2721       && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
2722 #endif
2723       )
2724     {
2725       int some_was_live = 0, some_was_dead = 0;
2726 
2727       for (i = regno_first; i <= regno_last; ++i)
2728 	{
2729 	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
2730 	  if (pbi->local_set)
2731 	    {
2732 	      /* Order of the set operation matters here since both
2733 		 sets may be the same.  */
2734 	      CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
2735 	      if (cond != NULL_RTX
2736 		  && ! REGNO_REG_SET_P (pbi->local_set, i))
2737 		SET_REGNO_REG_SET (pbi->cond_local_set, i);
2738 	      else
2739 		SET_REGNO_REG_SET (pbi->local_set, i);
2740 	    }
2741 	  if (code != CLOBBER)
2742 	    SET_REGNO_REG_SET (pbi->new_set, i);
2743 
2744 	  some_was_live |= needed_regno;
2745 	  some_was_dead |= ! needed_regno;
2746 	}
2747 
2748 #ifdef HAVE_conditional_execution
2749       /* Consider conditional death in deciding that the register needs
2750 	 a death note.  */
2751       if (some_was_live && ! not_dead
2752 	  /* The stack pointer is never dead.  Well, not strictly true,
2753 	     but it's very difficult to tell from here.  Hopefully
2754 	     combine_stack_adjustments will fix up the most egregious
2755 	     errors.  */
2756 	  && regno_first != STACK_POINTER_REGNUM)
2757 	{
2758 	  for (i = regno_first; i <= regno_last; ++i)
2759 	    if (! mark_regno_cond_dead (pbi, i, cond))
2760 	      not_dead |= ((unsigned long) 1) << (i - regno_first);
2761 	}
2762 #endif
2763 
2764       /* Additional data to record if this is the final pass.  */
2765       if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
2766 		   | PROP_DEATH_NOTES | PROP_AUTOINC))
2767 	{
2768 	  rtx y;
2769 	  int blocknum = pbi->bb->index;
2770 
2771 	  y = NULL_RTX;
2772 	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2773 	    {
2774 	      y = pbi->reg_next_use[regno_first];
2775 
2776 	      /* The next use is no longer next, since a store intervenes.  */
2777 	      for (i = regno_first; i <= regno_last; ++i)
2778 		pbi->reg_next_use[i] = 0;
2779 	    }
2780 
2781 	  if (flags & PROP_REG_INFO)
2782 	    {
2783 	      for (i = regno_first; i <= regno_last; ++i)
2784 		{
2785 		  /* Count (weighted) references, stores, etc.  This counts a
2786 		     register twice if it is modified, but that is correct.  */
2787 		  REG_N_SETS (i) += 1;
2788 		  REG_N_REFS (i) += 1;
2789 		  REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
2790 
2791 	          /* The insns where a reg is live are normally counted
2792 		     elsewhere, but we want the count to include the insn
2793 		     where the reg is set, and the normal counting mechanism
2794 		     would not count it.  */
2795 		  REG_LIVE_LENGTH (i) += 1;
2796 		}
2797 
2798 	      /* If this is a hard reg, record this function uses the reg.  */
2799 	      if (regno_first < FIRST_PSEUDO_REGISTER)
2800 		{
2801 		  for (i = regno_first; i <= regno_last; i++)
2802 		    regs_ever_live[i] = 1;
2803 		}
2804 	      else
2805 		{
2806 		  /* Keep track of which basic blocks each reg appears in.  */
2807 		  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
2808 		    REG_BASIC_BLOCK (regno_first) = blocknum;
2809 		  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
2810 		    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
2811 		}
2812 	    }
2813 
2814 	  if (! some_was_dead)
2815 	    {
2816 	      if (flags & PROP_LOG_LINKS)
2817 		{
2818 		  /* Make a logical link from the next following insn
2819 		     that uses this register, back to this insn.
2820 		     The following insns have already been processed.
2821 
2822 		     We don't build a LOG_LINK for hard registers contained
2823 		     in ASM_OPERANDs.  If these registers get replaced,
2824 		     we might wind up changing the semantics of the insn,
2825 		     even if reload can make what appear to be valid
2826 		     assignments later.
2827 
2828 		     We don't build a LOG_LINK for global registers to
2829 		     or from a function call.  We don't want to let
2830 		     combine think that it knows what is going on with
2831 		     global registers.  */
2832 		  if (y && (BLOCK_NUM (y) == blocknum)
2833 		      && (regno_first >= FIRST_PSEUDO_REGISTER
2834 			  || (asm_noperands (PATTERN (y)) < 0
2835 			      && ! ((GET_CODE (insn) == CALL_INSN
2836 				     || GET_CODE (y) == CALL_INSN)
2837 				    && global_regs[regno_first]))))
2838 		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
2839 		}
2840 	    }
2841 	  else if (not_dead)
2842 	    ;
2843 	  else if (! some_was_live)
2844 	    {
2845 	      if (flags & PROP_REG_INFO)
2846 		REG_N_DEATHS (regno_first) += 1;
2847 
2848 	      if (flags & PROP_DEATH_NOTES)
2849 		{
2850 		  /* Note that dead stores have already been deleted
2851 		     when possible.  If we get here, we have found a
2852 		     dead store that cannot be eliminated (because the
2853 		     same insn does something useful).  Indicate this
2854 		     by marking the reg being set as dying here.  */
2855 		  REG_NOTES (insn)
2856 		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2857 		}
2858 	    }
2859 	  else
2860 	    {
2861 	      if (flags & PROP_DEATH_NOTES)
2862 		{
2863 		  /* This is a case where we have a multi-word hard register
2864 		     and some, but not all, of the words of the register are
2865 		     needed in subsequent insns.  Write REG_UNUSED notes
2866 		     for those parts that were not needed.  This case should
2867 		     be rare.  */
2868 
2869 		  for (i = regno_first; i <= regno_last; ++i)
2870 		    if (! REGNO_REG_SET_P (pbi->reg_live, i))
2871 		      REG_NOTES (insn)
2872 			= alloc_EXPR_LIST (REG_UNUSED,
2873 					   regno_reg_rtx[i],
2874 					   REG_NOTES (insn));
2875 		}
2876 	    }
2877 	}
2878 
2879       /* Mark the register as being dead.  */
2880       if (some_was_live
2881 	  /* The stack pointer is never dead.  Well, not strictly true,
2882 	     but it's very difficult to tell from here.  Hopefully
2883 	     combine_stack_adjustments will fix up the most egregious
2884 	     errors.  */
2885 	  && regno_first != STACK_POINTER_REGNUM)
2886 	{
2887 	  for (i = regno_first; i <= regno_last; ++i)
2888 	    if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
2889 	      CLEAR_REGNO_REG_SET (pbi->reg_live, i);
2890 	}
2891     }
2892   else if (GET_CODE (reg) == REG)
2893     {
2894       if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2895 	pbi->reg_next_use[regno_first] = 0;
2896     }
2897 
2898   /* If this is the last pass and this is a SCRATCH, show it will be dying
2899      here and count it.  */
2900   else if (GET_CODE (reg) == SCRATCH)
2901     {
2902       if (flags & PROP_DEATH_NOTES)
2903 	REG_NOTES (insn)
2904 	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2905     }
2906 }
2907 
2908 #ifdef HAVE_conditional_execution
2909 /* Mark REGNO conditionally dead.
2910    Return true if the register is now unconditionally dead.  */
2911 
2912 static int
2913 mark_regno_cond_dead (pbi, regno, cond)
2914      struct propagate_block_info *pbi;
2915      int regno;
2916      rtx cond;
2917 {
2918   /* If this is a store to a predicate register, the value of the
2919      predicate is changing; we don't know that the predicate as seen
2920      before is the same as that seen after.  Flush all dependent
2921      conditions from reg_cond_dead.  This will make all such
2922      conditionally live registers unconditionally live.  */
2923   if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
2924     flush_reg_cond_reg (pbi, regno);
2925 
2926   /* If this is an unconditional store, remove any conditional
2927      life that may have existed.  */
2928   if (cond == NULL_RTX)
2929     splay_tree_remove (pbi->reg_cond_dead, regno);
2930   else
2931     {
2932       splay_tree_node node;
2933       struct reg_cond_life_info *rcli;
2934       rtx ncond;
2935 
2936       /* Otherwise this is a conditional set.  Record that fact.
2937 	 It may have been conditionally used, or there may be a
2938 	 subsequent set with a complementary condition.  */
2939 
2940       node = splay_tree_lookup (pbi->reg_cond_dead, regno);
2941       if (node == NULL)
2942 	{
2943 	  /* The register was unconditionally live previously.
2944 	     Record the current condition as the condition under
2945 	     which it is dead.  */
2946 	  rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
2947 	  rcli->condition = cond;
2948 	  rcli->stores = cond;
2949 	  rcli->orig_condition = const0_rtx;
2950 	  splay_tree_insert (pbi->reg_cond_dead, regno,
2951 			     (splay_tree_value) rcli);
2952 
2953 	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
2954 
2955 	  /* Not unconditionally dead.  */
2956 	  return 0;
2957 	}
2958       else
2959 	{
2960 	  /* The register was conditionally live previously.
2961 	     Add the new condition to the old.  */
2962 	  rcli = (struct reg_cond_life_info *) node->value;
2963 	  ncond = rcli->condition;
2964 	  ncond = ior_reg_cond (ncond, cond, 1);
2965 	  if (rcli->stores == const0_rtx)
2966 	    rcli->stores = cond;
2967 	  else if (rcli->stores != const1_rtx)
2968 	    rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
2969 
2970 	  /* If the register is now unconditionally dead, remove the entry
2971 	     in the splay_tree.  A register is unconditionally dead if the
2972 	     dead condition ncond is true.  A register is also unconditionally
2973 	     dead if the sum of all conditional stores is an unconditional
2974 	     store (stores is true), and the dead condition is identically the
2975 	     same as the original dead condition initialized at the end of
2976 	     the block.  This is a pointer compare, not an rtx_equal_p
2977 	     compare.  */
2978 	  if (ncond == const1_rtx
2979 	      || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
2980 	    splay_tree_remove (pbi->reg_cond_dead, regno);
2981 	  else
2982 	    {
2983 	      rcli->condition = ncond;
2984 
2985 	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
2986 
2987 	      /* Not unconditionally dead.  */
2988 	      return 0;
2989 	    }
2990 	}
2991     }
2992 
2993   return 1;
2994 }
2995 
2996 /* Called from splay_tree_delete for pbi->reg_cond_life.  */
2997 
2998 static void
2999 free_reg_cond_life_info (value)
3000      splay_tree_value value;
3001 {
3002   struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
3003   free (rcli);
3004 }
3005 
3006 /* Helper function for flush_reg_cond_reg.  */
3007 
3008 static int
3009 flush_reg_cond_reg_1 (node, data)
3010      splay_tree_node node;
3011      void *data;
3012 {
3013   struct reg_cond_life_info *rcli;
3014   int *xdata = (int *) data;
3015   unsigned int regno = xdata[0];
3016 
3017   /* Don't need to search if last flushed value was farther on in
3018      the in-order traversal.  */
3019   if (xdata[1] >= (int) node->key)
3020     return 0;
3021 
3022   /* Splice out portions of the expression that refer to regno.  */
3023   rcli = (struct reg_cond_life_info *) node->value;
3024   rcli->condition = elim_reg_cond (rcli->condition, regno);
3025   if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
3026     rcli->stores = elim_reg_cond (rcli->stores, regno);
3027 
3028   /* If the entire condition is now false, signal the node to be removed.  */
3029   if (rcli->condition == const0_rtx)
3030     {
3031       xdata[1] = node->key;
3032       return -1;
3033     }
3034   else if (rcli->condition == const1_rtx)
3035     abort ();
3036 
3037   return 0;
3038 }
3039 
3040 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE.  */
3041 
3042 static void
3043 flush_reg_cond_reg (pbi, regno)
3044      struct propagate_block_info *pbi;
3045      int regno;
3046 {
3047   int pair[2];
3048 
3049   pair[0] = regno;
3050   pair[1] = -1;
3051   while (splay_tree_foreach (pbi->reg_cond_dead,
3052 			     flush_reg_cond_reg_1, pair) == -1)
3053     splay_tree_remove (pbi->reg_cond_dead, pair[1]);
3054 
3055   CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
3056 }
3057 
3058 /* Logical arithmetic on predicate conditions.  IOR, NOT and AND.
3059    For ior/and, the ADD flag determines whether we want to add the new
3060    condition X to the old one unconditionally.  If it is zero, we will
3061    only return a new expression if X allows us to simplify part of
3062    OLD, otherwise we return NULL to the caller.
3063    If ADD is nonzero, we will return a new condition in all cases.  The
3064    toplevel caller of one of these functions should always pass 1 for
3065    ADD.  */
3066 
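/* Hedged examples of this algebra (register numbers invented):

	ior_reg_cond ((eq (reg 96) (const_int 0)),
		      (ne (reg 96) (const_int 0)), 1)	==> const1_rtx
	and_reg_cond ((eq (reg 96) (const_int 0)),
		      (ne (reg 96) (const_int 0)), 1)	==> const0_rtx
	not_reg_cond ((eq (reg 96) (const_int 0)))	==> (ne (reg 96) (const_int 0))

   const0_rtx stands for "always false" and const1_rtx for "always true"
   throughout the reg_cond_dead machinery.  */
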
3067 static rtx
3068 ior_reg_cond (old, x, add)
3069      rtx old, x;
3070      int add;
3071 {
3072   rtx op0, op1;
3073 
3074   if (GET_RTX_CLASS (GET_CODE (old)) == '<')
3075     {
3076       if (GET_RTX_CLASS (GET_CODE (x)) == '<'
3077 	  && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
3078 	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3079 	return const1_rtx;
3080       if (GET_CODE (x) == GET_CODE (old)
3081 	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3082 	return old;
3083       if (! add)
3084 	return NULL;
3085       return gen_rtx_IOR (0, old, x);
3086     }
3087 
3088   switch (GET_CODE (old))
3089     {
3090     case IOR:
3091       op0 = ior_reg_cond (XEXP (old, 0), x, 0);
3092       op1 = ior_reg_cond (XEXP (old, 1), x, 0);
3093       if (op0 != NULL || op1 != NULL)
3094 	{
3095 	  if (op0 == const0_rtx)
3096 	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
3097 	  if (op1 == const0_rtx)
3098 	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
3099 	  if (op0 == const1_rtx || op1 == const1_rtx)
3100 	    return const1_rtx;
3101 	  if (op0 == NULL)
3102 	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3103 	  else if (rtx_equal_p (x, op0))
3104 	    /* (x | A) | x ~ (x | A).  */
3105 	    return old;
3106 	  if (op1 == NULL)
3107 	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3108 	  else if (rtx_equal_p (x, op1))
3109 	    /* (A | x) | x ~ (A | x).  */
3110 	    return old;
3111 	  return gen_rtx_IOR (0, op0, op1);
3112 	}
3113       if (! add)
3114 	return NULL;
3115       return gen_rtx_IOR (0, old, x);
3116 
3117     case AND:
3118       op0 = ior_reg_cond (XEXP (old, 0), x, 0);
3119       op1 = ior_reg_cond (XEXP (old, 1), x, 0);
3120       if (op0 != NULL || op1 != NULL)
3121 	{
3122 	  if (op0 == const1_rtx)
3123 	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
3124 	  if (op1 == const1_rtx)
3125 	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
3126 	  if (op0 == const0_rtx || op1 == const0_rtx)
3127 	    return const0_rtx;
3128 	  if (op0 == NULL)
3129 	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3130 	  else if (rtx_equal_p (x, op0))
3131 	    /* (x & A) | x ~ x.  */
3132 	    return op0;
3133 	  if (op1 == NULL)
3134 	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3135 	  else if (rtx_equal_p (x, op1))
3136 	    /* (A & x) | x ~ x.  */
3137 	    return op1;
3138 	  return gen_rtx_AND (0, op0, op1);
3139 	}
3140       if (! add)
3141 	return NULL;
3142       return gen_rtx_IOR (0, old, x);
3143 
3144     case NOT:
3145       op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3146       if (op0 != NULL)
3147 	return not_reg_cond (op0);
3148       if (! add)
3149 	return NULL;
3150       return gen_rtx_IOR (0, old, x);
3151 
3152     default:
3153       abort ();
3154     }
3155 }
3156 
3157 static rtx
3158 not_reg_cond (x)
3159      rtx x;
3160 {
3161   enum rtx_code x_code;
3162 
3163   if (x == const0_rtx)
3164     return const1_rtx;
3165   else if (x == const1_rtx)
3166     return const0_rtx;
3167   x_code = GET_CODE (x);
3168   if (x_code == NOT)
3169     return XEXP (x, 0);
3170   if (GET_RTX_CLASS (x_code) == '<'
3171       && GET_CODE (XEXP (x, 0)) == REG)
3172     {
3173       if (XEXP (x, 1) != const0_rtx)
3174 	abort ();
3175 
3176       return gen_rtx_fmt_ee (reverse_condition (x_code),
3177 			     VOIDmode, XEXP (x, 0), const0_rtx);
3178     }
3179   return gen_rtx_NOT (0, x);
3180 }
3181 
3182 static rtx
3183 and_reg_cond (old, x, add)
3184      rtx old, x;
3185      int add;
3186 {
3187   rtx op0, op1;
3188 
3189   if (GET_RTX_CLASS (GET_CODE (old)) == '<')
3190     {
3191       if (GET_RTX_CLASS (GET_CODE (x)) == '<'
3192 	  && GET_CODE (x) == reverse_condition (GET_CODE (old))
3193 	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3194 	return const0_rtx;
3195       if (GET_CODE (x) == GET_CODE (old)
3196 	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3197 	return old;
3198       if (! add)
3199 	return NULL;
3200       return gen_rtx_AND (0, old, x);
3201     }
3202 
3203   switch (GET_CODE (old))
3204     {
3205     case IOR:
3206       op0 = and_reg_cond (XEXP (old, 0), x, 0);
3207       op1 = and_reg_cond (XEXP (old, 1), x, 0);
3208       if (op0 != NULL || op1 != NULL)
3209 	{
3210 	  if (op0 == const0_rtx)
3211 	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3212 	  if (op1 == const0_rtx)
3213 	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3214 	  if (op0 == const1_rtx || op1 == const1_rtx)
3215 	    return const1_rtx;
3216 	  if (op0 == NULL)
3217 	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3218 	  else if (rtx_equal_p (x, op0))
3219 	    /* (x | A) & x ~ x.  */
3220 	    return op0;
3221 	  if (op1 == NULL)
3222 	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3223 	  else if (rtx_equal_p (x, op1))
3224 	    /* (A | x) & x ~ x.  */
3225 	    return op1;
3226 	  return gen_rtx_IOR (0, op0, op1);
3227 	}
3228       if (! add)
3229 	return NULL;
3230       return gen_rtx_AND (0, old, x);
3231 
3232     case AND:
3233       op0 = and_reg_cond (XEXP (old, 0), x, 0);
3234       op1 = and_reg_cond (XEXP (old, 1), x, 0);
3235       if (op0 != NULL || op1 != NULL)
3236 	{
3237 	  if (op0 == const1_rtx)
3238 	    return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3239 	  if (op1 == const1_rtx)
3240 	    return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3241 	  if (op0 == const0_rtx || op1 == const0_rtx)
3242 	    return const0_rtx;
3243 	  if (op0 == NULL)
3244 	    op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3245 	  else if (rtx_equal_p (x, op0))
3246 	    /* (x & A) & x ~ (x & A).  */
3247 	    return old;
3248 	  if (op1 == NULL)
3249 	    op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3250 	  else if (rtx_equal_p (x, op1))
3251 	    /* (A & x) & x ~ (A & x).  */
3252 	    return old;
3253 	  return gen_rtx_AND (0, op0, op1);
3254 	}
3255       if (! add)
3256 	return NULL;
3257       return gen_rtx_AND (0, old, x);
3258 
3259     case NOT:
3260       op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3261       if (op0 != NULL)
3262 	return not_reg_cond (op0);
3263       if (! add)
3264 	return NULL;
3265       return gen_rtx_AND (0, old, x);
3266 
3267     default:
3268       abort ();
3269     }
3270 }
3271 
3272 /* Given a condition X, remove references to reg REGNO and return the
3273    new condition.  The removal will be done so that all conditions
3274    involving REGNO are considered to evaluate to false.  This function
3275    is used when the value of REGNO changes.  */
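/* For instance (an illustration, not taken from any particular port):
   eliminating register 2 from
     (ior (ne (reg 1) (const_int 0)) (eq (reg 2) (const_int 0)))
   yields (ne (reg 1) (const_int 0)), because the test involving
   register 2 is treated as false and the IOR is then simplified.  */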
3276 
3277 static rtx
3278 elim_reg_cond (x, regno)
3279      rtx x;
3280      unsigned int regno;
3281 {
3282   rtx op0, op1;
3283 
3284   if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3285     {
3286       if (REGNO (XEXP (x, 0)) == regno)
3287 	return const0_rtx;
3288       return x;
3289     }
3290 
3291   switch (GET_CODE (x))
3292     {
3293     case AND:
3294       op0 = elim_reg_cond (XEXP (x, 0), regno);
3295       op1 = elim_reg_cond (XEXP (x, 1), regno);
3296       if (op0 == const0_rtx || op1 == const0_rtx)
3297 	return const0_rtx;
3298       if (op0 == const1_rtx)
3299 	return op1;
3300       if (op1 == const1_rtx)
3301 	return op0;
3302       if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3303 	return x;
3304       return gen_rtx_AND (0, op0, op1);
3305 
3306     case IOR:
3307       op0 = elim_reg_cond (XEXP (x, 0), regno);
3308       op1 = elim_reg_cond (XEXP (x, 1), regno);
3309       if (op0 == const1_rtx || op1 == const1_rtx)
3310 	return const1_rtx;
3311       if (op0 == const0_rtx)
3312 	return op1;
3313       if (op1 == const0_rtx)
3314 	return op0;
3315       if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3316 	return x;
3317       return gen_rtx_IOR (0, op0, op1);
3318 
3319     case NOT:
3320       op0 = elim_reg_cond (XEXP (x, 0), regno);
3321       if (op0 == const0_rtx)
3322 	return const1_rtx;
3323       if (op0 == const1_rtx)
3324 	return const0_rtx;
3325       if (op0 != XEXP (x, 0))
3326 	return not_reg_cond (op0);
3327       return x;
3328 
3329     default:
3330       abort ();
3331     }
3332 }
3333 #endif /* HAVE_conditional_execution */
3334 
3335 #ifdef AUTO_INC_DEC
3336 
3337 /* Try to substitute the auto-inc expression INC as the address inside
3338    MEM which occurs in INSN.  Currently, the address of MEM is an expression
3339    involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
3340    that has a single set whose source is a PLUS of INCR_REG and something
3341    else.  */
3342 
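/* As a rough illustration (the rtl is schematic): if INSN contains
   (mem:SI (reg 100)) and INCR is
     (set (reg 100) (plus:SI (reg 100) (const_int 4))),
   then INC would be (post_inc:SI (reg 100)).  On success the MEM's
   address is replaced by INC, a REG_INC note is added to INSN, and
   INCR is reduced to a plain copy (and deleted if it copies the
   register to itself).  */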
3343 static void
3344 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
3345      struct propagate_block_info *pbi;
3346      rtx inc, insn, mem, incr, incr_reg;
3347 {
3348   int regno = REGNO (incr_reg);
3349   rtx set = single_set (incr);
3350   rtx q = SET_DEST (set);
3351   rtx y = SET_SRC (set);
3352   int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
3353 
3354   /* Make sure this reg appears only once in this insn.  */
3355   if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
3356     return;
3357 
3358   if (dead_or_set_p (incr, incr_reg)
3359       /* Mustn't autoinc an eliminable register.  */
3360       && (regno >= FIRST_PSEUDO_REGISTER
3361 	  || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
3362     {
3363       /* This is the simple case.  Try to make the auto-inc.  If
3364 	 we can't, we are done.  Otherwise, we will do any
3365 	 needed updates below.  */
3366       if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
3367 	return;
3368     }
3369   else if (GET_CODE (q) == REG
3370 	   /* PREV_INSN used here to check the semi-open interval
3371 	      [insn,incr).  */
3372 	   && ! reg_used_between_p (q,  PREV_INSN (insn), incr)
3373 	   /* We must also check for sets of q as q may be
3374 	      a call clobbered hard register and there may
3375 	      be a call between PREV_INSN (insn) and incr.  */
3376 	   && ! reg_set_between_p (q,  PREV_INSN (insn), incr))
3377     {
3378       /* We have *p followed sometime later by q = p+size.
3379 	 Both p and q must be live afterward,
3380 	 and q is not used between INSN and its assignment.
3381 	 Change it to q = p, ...*q..., q = q+size.
3382 	 Then fall into the usual case.  */
3383       rtx insns, temp;
3384 
3385       start_sequence ();
3386       emit_move_insn (q, incr_reg);
3387       insns = get_insns ();
3388       end_sequence ();
3389 
3390       /* If we can't make the auto-inc, or can't make the
3391 	 replacement into Y, exit.  There's no point in making
3392 	 the change below if we can't do the auto-inc and doing
3393 	 so is not correct in the pre-inc case.  */
3394 
3395       XEXP (inc, 0) = q;
3396       validate_change (insn, &XEXP (mem, 0), inc, 1);
3397       validate_change (incr, &XEXP (y, opnum), q, 1);
3398       if (! apply_change_group ())
3399 	return;
3400 
3401       /* We now know we'll be doing this change, so emit the
3402 	 new insn(s) and do the updates.  */
3403       emit_insn_before (insns, insn);
3404 
3405       if (pbi->bb->head == insn)
3406 	pbi->bb->head = insns;
3407 
3408       /* INCR will become a NOTE and INSN won't contain a
3409 	 use of INCR_REG.  If a use of INCR_REG was just placed in
3410 	 the insn before INSN, make that the next use.
3411 	 Otherwise, invalidate it.  */
3412       if (GET_CODE (PREV_INSN (insn)) == INSN
3413 	  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3414 	  && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
3415 	pbi->reg_next_use[regno] = PREV_INSN (insn);
3416       else
3417 	pbi->reg_next_use[regno] = 0;
3418 
3419       incr_reg = q;
3420       regno = REGNO (q);
3421 
3422       /* REGNO is now used in INCR which is below INSN, but
3423 	 it previously wasn't live here.  If we don't mark
3424 	 it as live, we'll put a REG_DEAD note for it
3425 	 on this insn, which is incorrect.  */
3426       SET_REGNO_REG_SET (pbi->reg_live, regno);
3427 
3428       /* If there are any calls between INSN and INCR, show
3429 	 that REGNO now crosses them.  */
3430       for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3431 	if (GET_CODE (temp) == CALL_INSN)
3432 	  REG_N_CALLS_CROSSED (regno)++;
3433 
3434       /* Invalidate alias info for Q since we just changed its value.  */
3435       clear_reg_alias_info (q);
3436     }
3437   else
3438     return;
3439 
3440   /* If we haven't returned, it means we were able to make the
3441      auto-inc, so update the status.  First, record that this insn
3442      has an implicit side effect.  */
3443 
3444   REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
3445 
3446   /* Modify the old increment-insn to simply copy
3447      the already-incremented value of our register.  */
3448   if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
3449     abort ();
3450 
3451   /* If that makes it a no-op (copying the register into itself), delete
3452      it so it won't appear to be a "use" and a "set" of this
3453      register.  */
3454   if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
3455     {
3456       /* If the original source was dead, it's dead now.  */
3457       rtx note;
3458 
3459       while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
3460 	{
3461 	  remove_note (incr, note);
3462 	  if (XEXP (note, 0) != incr_reg)
3463 	    CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
3464 	}
3465 
3466       PUT_CODE (incr, NOTE);
3467       NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3468       NOTE_SOURCE_FILE (incr) = 0;
3469     }
3470 
3471   if (regno >= FIRST_PSEUDO_REGISTER)
3472     {
3473       /* Count an extra reference to the reg.  When a reg is
3474 	 incremented, spilling it is worse, so we want to make
3475 	 that less likely.  */
3476       REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
3477 
3478       /* Count the increment as a setting of the register,
3479 	 even though it isn't a SET in rtl.  */
3480       REG_N_SETS (regno)++;
3481     }
3482 }
3483 
3484 /* X is a MEM found in INSN.  See if we can convert it into an auto-increment
3485    reference.  */
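/* Schematically, for a 4-byte MEM whose address register is later
   incremented by 4 in the same block: an address of (reg) can become
   (post_inc (reg)), and an address of (plus (reg) (const_int 4)) can
   become (pre_inc (reg)).  Decrements map to POST_DEC/PRE_DEC, and
   other constant or register increments may be handled with
   POST_MODIFY where the target supports it.  */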
3486 
3487 static void
3488 find_auto_inc (pbi, x, insn)
3489      struct propagate_block_info *pbi;
3490      rtx x;
3491      rtx insn;
3492 {
3493   rtx addr = XEXP (x, 0);
3494   HOST_WIDE_INT offset = 0;
3495   rtx set, y, incr, inc_val;
3496   int regno;
3497   int size = GET_MODE_SIZE (GET_MODE (x));
3498 
3499   if (GET_CODE (insn) == JUMP_INSN)
3500     return;
3501 
3502   /* Here we detect use of an index register which might be good for
3503      postincrement, postdecrement, preincrement, or predecrement.  */
3504 
3505   if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3506     offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3507 
3508   if (GET_CODE (addr) != REG)
3509     return;
3510 
3511   regno = REGNO (addr);
3512 
3513   /* Is the next use an increment that might make auto-increment? */
3514   incr = pbi->reg_next_use[regno];
3515   if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
3516     return;
3517   set = single_set (incr);
3518   if (set == 0 || GET_CODE (set) != SET)
3519     return;
3520   y = SET_SRC (set);
3521 
3522   if (GET_CODE (y) != PLUS)
3523     return;
3524 
3525   if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
3526     inc_val = XEXP (y, 1);
3527   else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
3528     inc_val = XEXP (y, 0);
3529   else
3530     return;
3531 
3532   if (GET_CODE (inc_val) == CONST_INT)
3533     {
3534       if (HAVE_POST_INCREMENT
3535 	  && (INTVAL (inc_val) == size && offset == 0))
3536 	attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
3537 			  incr, addr);
3538       else if (HAVE_POST_DECREMENT
3539 	       && (INTVAL (inc_val) == -size && offset == 0))
3540 	attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
3541 			  incr, addr);
3542       else if (HAVE_PRE_INCREMENT
3543 	       && (INTVAL (inc_val) == size && offset == size))
3544 	attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
3545 			  incr, addr);
3546       else if (HAVE_PRE_DECREMENT
3547 	       && (INTVAL (inc_val) == -size && offset == -size))
3548 	attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
3549 			  incr, addr);
3550       else if (HAVE_POST_MODIFY_DISP && offset == 0)
3551 	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3552 						    gen_rtx_PLUS (Pmode,
3553 								  addr,
3554 								  inc_val)),
3555 			  insn, x, incr, addr);
3556     }
3557   else if (GET_CODE (inc_val) == REG
3558 	   && ! reg_set_between_p (inc_val, PREV_INSN (insn),
3559 				   NEXT_INSN (incr)))
3560 
3561     {
3562       if (HAVE_POST_MODIFY_REG && offset == 0)
3563 	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3564 						    gen_rtx_PLUS (Pmode,
3565 								  addr,
3566 								  inc_val)),
3567 			  insn, x, incr, addr);
3568     }
3569 }
3570 
3571 #endif /* AUTO_INC_DEC */
3572 
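/* Mark the hard or pseudo register REG (possibly multi-word) as used by
   INSN: record the next use for auto-inc and log-link purposes, update
   register statistics, add a REG_DEAD note if the register was dead
   after INSN and is not set by it, and mark it live in PBI->REG_LIVE
   (tracking COND when conditional execution is in use).  */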
3573 static void
3574 mark_used_reg (pbi, reg, cond, insn)
3575      struct propagate_block_info *pbi;
3576      rtx reg;
3577      rtx cond ATTRIBUTE_UNUSED;
3578      rtx insn;
3579 {
3580   unsigned int regno_first, regno_last, i;
3581   int some_was_live, some_was_dead, some_not_set;
3582 
3583   regno_last = regno_first = REGNO (reg);
3584   if (regno_first < FIRST_PSEUDO_REGISTER)
3585     regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
3586 
3587   /* Find out if any of this register is live after this instruction.  */
3588   some_was_live = some_was_dead = 0;
3589   for (i = regno_first; i <= regno_last; ++i)
3590     {
3591       int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
3592       some_was_live |= needed_regno;
3593       some_was_dead |= ! needed_regno;
3594     }
3595 
3596   /* Find out if any of the register was set this insn.  */
3597   some_not_set = 0;
3598   for (i = regno_first; i <= regno_last; ++i)
3599     some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
3600 
3601   if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3602     {
3603       /* Record where each reg is used, so when the reg is set we know
3604 	 the next insn that uses it.  */
3605       pbi->reg_next_use[regno_first] = insn;
3606     }
3607 
3608   if (pbi->flags & PROP_REG_INFO)
3609     {
3610       if (regno_first < FIRST_PSEUDO_REGISTER)
3611 	{
3612 	  /* If this is a register we are going to try to eliminate,
3613 	     don't mark it live here.  If we are successful in
3614 	     eliminating it, it need not be live unless it is used for
3615 	     pseudos, in which case it will have been set live when it
3616 	     was allocated to the pseudos.  If the register will not
3617 	     be eliminated, reload will set it live at that point.
3618 
3619 	     Otherwise, record that this function uses this register.  */
3620 	  /* ??? The PPC backend tries to "eliminate" the pic
3621 	     register to itself.  This should be fixed.  In the
3622 	     meantime, hack around it.  */
3623 
3624 	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
3625 	         && (regno_first == FRAME_POINTER_REGNUM
3626 		     || regno_first == ARG_POINTER_REGNUM)))
3627 	    for (i = regno_first; i <= regno_last; ++i)
3628 	      regs_ever_live[i] = 1;
3629 	}
3630       else
3631 	{
3632 	  /* Keep track of which basic block each reg appears in.  */
3633 
3634 	  int blocknum = pbi->bb->index;
3635 	  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
3636 	    REG_BASIC_BLOCK (regno_first) = blocknum;
3637 	  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
3638 	    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
3639 
3640 	  /* Count (weighted) number of uses of each reg.  */
3641 	  REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
3642 	  REG_N_REFS (regno_first)++;
3643 	}
3644     }
3645 
3646   /* Record and count the insns in which a reg dies.  If it is used in
3647      this insn and was dead below the insn then it dies in this insn.
3648      If it was set in this insn, we do not make a REG_DEAD note;
3649      likewise if we already made such a note.  */
3650   if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
3651       && some_was_dead
3652       && some_not_set)
3653     {
3654       /* Check for the case where the register dying partially
3655 	 overlaps the register set by this insn.  */
3656       if (regno_first != regno_last)
3657 	for (i = regno_first; i <= regno_last; ++i)
3658 	  some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
3659 
3660       /* If none of the words in X is needed, make a REG_DEAD note.
3661 	 Otherwise, we must make partial REG_DEAD notes.  */
3662       if (! some_was_live)
3663 	{
3664 	  if ((pbi->flags & PROP_DEATH_NOTES)
3665 	      && ! find_regno_note (insn, REG_DEAD, regno_first))
3666 	    REG_NOTES (insn)
3667 	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
3668 
3669 	  if (pbi->flags & PROP_REG_INFO)
3670 	    REG_N_DEATHS (regno_first)++;
3671 	}
3672       else
3673 	{
3674 	  /* Don't make a REG_DEAD note for a part of a register
3675 	     that is set in the insn.  */
3676 	  for (i = regno_first; i <= regno_last; ++i)
3677 	    if (! REGNO_REG_SET_P (pbi->reg_live, i)
3678 		&& ! dead_or_set_regno_p (insn, i))
3679 	      REG_NOTES (insn)
3680 		= alloc_EXPR_LIST (REG_DEAD,
3681 				   regno_reg_rtx[i],
3682 				   REG_NOTES (insn));
3683 	}
3684     }
3685 
3686   /* Mark the register as being live.  */
3687   for (i = regno_first; i <= regno_last; ++i)
3688     {
3689 #ifdef HAVE_conditional_execution
3690       int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
3691 #endif
3692 
3693       SET_REGNO_REG_SET (pbi->reg_live, i);
3694 
3695 #ifdef HAVE_conditional_execution
3696       /* If this is a conditional use, record that fact.  If it is later
3697 	 conditionally set, we'll know to kill the register.  */
3698       if (cond != NULL_RTX)
3699 	{
3700 	  splay_tree_node node;
3701 	  struct reg_cond_life_info *rcli;
3702 	  rtx ncond;
3703 
3704 	  if (this_was_live)
3705 	    {
3706 	      node = splay_tree_lookup (pbi->reg_cond_dead, i);
3707 	      if (node == NULL)
3708 		{
3709 		  /* The register was unconditionally live previously.
3710 		     No need to do anything.  */
3711 		}
3712 	      else
3713 		{
3714 		  /* The register was conditionally live previously.
3715 		     Subtract the new life cond from the old death cond.  */
3716 		  rcli = (struct reg_cond_life_info *) node->value;
3717 		  ncond = rcli->condition;
3718 		  ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
3719 
3720 		  /* If the register is now unconditionally live,
3721 		     remove the entry in the splay_tree.  */
3722 		  if (ncond == const0_rtx)
3723 		    splay_tree_remove (pbi->reg_cond_dead, i);
3724 		  else
3725 		    {
3726 		      rcli->condition = ncond;
3727 		      SET_REGNO_REG_SET (pbi->reg_cond_reg,
3728 					 REGNO (XEXP (cond, 0)));
3729 		    }
3730 		}
3731 	    }
3732 	  else
3733 	    {
3734 	      /* The register was not previously live at all.  Record
3735 		 the condition under which it is still dead.  */
3736 	      rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3737 	      rcli->condition = not_reg_cond (cond);
3738 	      rcli->stores = const0_rtx;
3739 	      rcli->orig_condition = const0_rtx;
3740 	      splay_tree_insert (pbi->reg_cond_dead, i,
3741 				 (splay_tree_value) rcli);
3742 
3743 	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3744 	    }
3745 	}
3746       else if (this_was_live)
3747 	{
3748 	  /* The register may have been conditionally live previously, but
3749 	     is now unconditionally live.  Remove it from the conditionally
3750 	     dead list, so that a conditional set won't cause us to think
3751 	     it dead.  */
3752 	  splay_tree_remove (pbi->reg_cond_dead, i);
3753 	}
3754 #endif
3755     }
3756 }
3757 
3758 /* Scan expression X and mark as live (in PBI->REG_LIVE) each reg it uses.
3759    This is done assuming the registers needed after this point are those
3760    that already have 1-bits in PBI->REG_LIVE.
3761 
3762    INSN is the containing instruction.  If INSN is dead, this function
3763    is not called.  */
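/* For example (schematic rtl): scanning
     (set (mem:SI (reg 101)) (plus:SI (reg 102) (const_int 4)))
   marks registers 101 and 102 as used; the MEM is a store destination,
   so no destination register is marked for it.  */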
3764 
3765 static void
3766 mark_used_regs (pbi, x, cond, insn)
3767      struct propagate_block_info *pbi;
3768      rtx x, cond, insn;
3769 {
3770   RTX_CODE code;
3771   int regno;
3772   int flags = pbi->flags;
3773 
3774  retry:
3775   if (!x)
3776     return;
3777   code = GET_CODE (x);
3778   switch (code)
3779     {
3780     case LABEL_REF:
3781     case SYMBOL_REF:
3782     case CONST_INT:
3783     case CONST:
3784     case CONST_DOUBLE:
3785     case CONST_VECTOR:
3786     case PC:
3787     case ADDR_VEC:
3788     case ADDR_DIFF_VEC:
3789       return;
3790 
3791 #ifdef HAVE_cc0
3792     case CC0:
3793       pbi->cc0_live = 1;
3794       return;
3795 #endif
3796 
3797     case CLOBBER:
3798       /* If we are clobbering a MEM, mark any registers inside the address
3799 	 as being used.  */
3800       if (GET_CODE (XEXP (x, 0)) == MEM)
3801 	mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
3802       return;
3803 
3804     case MEM:
3805       /* Don't bother watching stores to mems if this is not the
3806 	 final pass.  We'll not be deleting dead stores this round.  */
3807       if (optimize && (flags & PROP_SCAN_DEAD_STORES))
3808 	{
3809 	  /* Invalidate the data for the last MEM stored, but only if MEM is
3810 	     something that can be stored into.  */
3811 	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3812 	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3813 	    /* Needn't clear the memory set list.  */
3814 	    ;
3815 	  else
3816 	    {
3817 	      rtx temp = pbi->mem_set_list;
3818 	      rtx prev = NULL_RTX;
3819 	      rtx next;
3820 
3821 	      while (temp)
3822 		{
3823 		  next = XEXP (temp, 1);
3824 		  if (anti_dependence (XEXP (temp, 0), x))
3825 		    {
3826 		      /* Splice temp out of the list.  */
3827 		      if (prev)
3828 			XEXP (prev, 1) = next;
3829 		      else
3830 			pbi->mem_set_list = next;
3831 		      free_EXPR_LIST_node (temp);
3832 		      pbi->mem_set_list_len--;
3833 		    }
3834 		  else
3835 		    prev = temp;
3836 		  temp = next;
3837 		}
3838 	    }
3839 
3840 	  /* If the memory reference had embedded side effects (autoincrement
3841 	     address modes), then we may need to kill some entries on the
3842 	     memory set list.  */
3843 	  if (insn)
3844 	    for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
3845 	}
3846 
3847 #ifdef AUTO_INC_DEC
3848       if (flags & PROP_AUTOINC)
3849 	find_auto_inc (pbi, x, insn);
3850 #endif
3851       break;
3852 
3853     case SUBREG:
3854 #ifdef CANNOT_CHANGE_MODE_CLASS
3855       record_subregs_of_mode (x);
3856 #endif
3857 
3858       /* While we're here, optimize this case.  */
3859       x = SUBREG_REG (x);
3860       if (GET_CODE (x) != REG)
3861 	goto retry;
3862       /* Fall through.  */
3863 
3864     case REG:
3865       /* A register used other than being set => mark it as needed.  */
3866       mark_used_reg (pbi, x, cond, insn);
3867       return;
3868 
3869     case SET:
3870       {
3871 	rtx testreg = SET_DEST (x);
3872 	int mark_dest = 0;
3873 
3874 	/* If storing into MEM, don't show it as being used.  But do
3875 	   show the address as being used.  */
3876 	if (GET_CODE (testreg) == MEM)
3877 	  {
3878 #ifdef AUTO_INC_DEC
3879 	    if (flags & PROP_AUTOINC)
3880 	      find_auto_inc (pbi, testreg, insn);
3881 #endif
3882 	    mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
3883 	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
3884 	    return;
3885 	  }
3886 
3887 	/* Storing in STRICT_LOW_PART is like storing in a reg
3888 	   in that this SET might be dead, so ignore it in TESTREG,
3889 	   but in some other ways it is like using the reg.
3890 
3891 	   Storing in a SUBREG or a bit field is like storing the entire
3892 	   register in that if the register's value is not used
3893 	   then this SET is not needed.  */
3894 	while (GET_CODE (testreg) == STRICT_LOW_PART
3895 	       || GET_CODE (testreg) == ZERO_EXTRACT
3896 	       || GET_CODE (testreg) == SIGN_EXTRACT
3897 	       || GET_CODE (testreg) == SUBREG)
3898 	  {
3899 #ifdef CANNOT_CHANGE_MODE_CLASS
3900 	    if (GET_CODE (testreg) == SUBREG)
3901 	      record_subregs_of_mode (testreg);
3902 #endif
3903 
3904 	    /* Modifying a single register in an alternate mode
3905 	       does not use any of the old value.  But these other
3906 	       ways of storing in a register do use the old value.  */
3907 	    if (GET_CODE (testreg) == SUBREG
3908 		&& !((REG_BYTES (SUBREG_REG (testreg))
3909 		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD
3910 		     > (REG_BYTES (testreg)
3911 			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD))
3912 	      ;
3913 	    else
3914 	      mark_dest = 1;
3915 
3916 	    testreg = XEXP (testreg, 0);
3917 	  }
3918 
3919 	/* If this is a store into a register or group of registers,
3920 	   recursively scan the value being stored.  */
3921 
3922 	if ((GET_CODE (testreg) == PARALLEL
3923 	     && GET_MODE (testreg) == BLKmode)
3924 	    || (GET_CODE (testreg) == REG
3925 		&& (regno = REGNO (testreg),
3926 		    ! (regno == FRAME_POINTER_REGNUM
3927 		       && (! reload_completed || frame_pointer_needed)))
3928 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3929 		&& ! (regno == HARD_FRAME_POINTER_REGNUM
3930 		      && (! reload_completed || frame_pointer_needed))
3931 #endif
3932 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3933 		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3934 #endif
3935 		))
3936 	  {
3937 	    if (mark_dest)
3938 	      mark_used_regs (pbi, SET_DEST (x), cond, insn);
3939 	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
3940 	    return;
3941 	  }
3942       }
3943       break;
3944 
3945     case ASM_OPERANDS:
3946     case UNSPEC_VOLATILE:
3947     case TRAP_IF:
3948     case ASM_INPUT:
3949       {
3950 	/* Traditional and volatile asm instructions must be considered to use
3951 	   and clobber all hard registers, all pseudo-registers and all of
3952 	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
3953 
3954 	   Consider for instance a volatile asm that changes the fpu rounding
3955 	   mode.  An insn should not be moved across this even if it only uses
3956 	   pseudo-regs because it might give an incorrectly rounded result.
3957 
3958 	   ?!? Unfortunately, marking all hard registers as live causes massive
3959 	   problems for the register allocator and marking all pseudos as live
3960 	   creates mountains of uninitialized variable warnings.
3961 
3962 	   So for now, just clear the memory set list and mark any regs
3963 	   we can find in ASM_OPERANDS as used.  */
3964 	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
3965 	  {
3966 	    free_EXPR_LIST_list (&pbi->mem_set_list);
3967 	    pbi->mem_set_list_len = 0;
3968 	  }
3969 
3970 	/* For all ASM_OPERANDS, we must traverse the vector of input operands.
3971 	   We cannot just fall through here since then we would be confused
3972 	   by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
3973 	   a traditional asm, unlike its normal usage.  */
3974 	if (code == ASM_OPERANDS)
3975 	  {
3976 	    int j;
3977 
3978 	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3979 	      mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
3980 	  }
3981 	break;
3982       }
3983 
3984     case COND_EXEC:
3985       if (cond != NULL_RTX)
3986 	abort ();
3987 
3988       mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
3989 
3990       cond = COND_EXEC_TEST (x);
3991       x = COND_EXEC_CODE (x);
3992       goto retry;
3993 
3994     case PHI:
3995       /* We _do_not_ want to scan operands of phi nodes.  Operands of
3996 	 a phi function are evaluated only when control reaches this
3997 	 block along a particular edge.  Therefore, regs that appear
3998 	 as arguments to phi should not be added to the global live at
3999 	 start.  */
4000       return;
4001 
4002     default:
4003       break;
4004     }
4005 
4006   /* Recursively scan the operands of this expression.  */
4007 
4008   {
4009     const char * const fmt = GET_RTX_FORMAT (code);
4010     int i;
4011 
4012     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4013       {
4014 	if (fmt[i] == 'e')
4015 	  {
4016 	    /* Tail recursive case: save a function call level.  */
4017 	    if (i == 0)
4018 	      {
4019 		x = XEXP (x, 0);
4020 		goto retry;
4021 	      }
4022 	    mark_used_regs (pbi, XEXP (x, i), cond, insn);
4023 	  }
4024 	else if (fmt[i] == 'E')
4025 	  {
4026 	    int j;
4027 	    for (j = 0; j < XVECLEN (x, i); j++)
4028 	      mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4029 	  }
4030       }
4031   }
4032 }
4033 
4034 #ifdef AUTO_INC_DEC
4035 
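/* INSN has a single set that adds a constant to (or subtracts one from)
   a register.  Find the next use of that register in the same basic
   block and, when possible, fold the addition into that use as a pre-
   or post-increment via try_pre_increment below; on success the add
   itself is deleted.  Return 1 if the transformation was made.  */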
4036 static int
4037 try_pre_increment_1 (pbi, insn)
4038      struct propagate_block_info *pbi;
4039      rtx insn;
4040 {
4041   /* Find the next use of this reg.  If in same basic block,
4042      make it do pre-increment or pre-decrement if appropriate.  */
4043   rtx x = single_set (insn);
4044   HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4045 			  * INTVAL (XEXP (SET_SRC (x), 1)));
4046   int regno = REGNO (SET_DEST (x));
4047   rtx y = pbi->reg_next_use[regno];
4048   if (y != 0
4049       && SET_DEST (x) != stack_pointer_rtx
4050       && BLOCK_NUM (y) == BLOCK_NUM (insn)
4051       /* Don't do this if the reg dies, or gets set in y; a standard addressing
4052 	 mode would be better.  */
4053       && ! dead_or_set_p (y, SET_DEST (x))
4054       && try_pre_increment (y, SET_DEST (x), amount))
4055     {
4056       /* We have found a suitable auto-increment and already changed
4057 	 insn Y to do it.  So flush this increment instruction.  */
4058       propagate_block_delete_insn (insn);
4059 
4060       /* Count a reference to this reg for the increment insn we are
4061 	 deleting.  When a reg is incremented, spilling it is worse,
4062 	 so we want to make that less likely.  */
4063       if (regno >= FIRST_PSEUDO_REGISTER)
4064 	{
4065 	  REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
4066 	  REG_N_SETS (regno)++;
4067 	}
4068 
4069       /* Flush any remembered memories depending on the value of
4070 	 the incremented register.  */
4071       invalidate_mems_from_set (pbi, SET_DEST (x));
4072 
4073       return 1;
4074     }
4075   return 0;
4076 }
4077 
4078 /* Try to change INSN so that it does pre-increment or pre-decrement
4079    addressing on register REG in order to add AMOUNT to REG.
4080    AMOUNT is negative for pre-decrement.
4081    Returns 1 if the change could be made.
4082    This checks all aspects of the validity of the result of modifying INSN.  */
4083 
4084 static int
4085 try_pre_increment (insn, reg, amount)
4086      rtx insn, reg;
4087      HOST_WIDE_INT amount;
4088 {
4089   rtx use;
4090 
4091   /* Nonzero if we can try to make a pre-increment or pre-decrement.
4092      For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
4093   int pre_ok = 0;
4094   /* Nonzero if we can try to make a post-increment or post-decrement.
4095      For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4096      It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4097      supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
4098   int post_ok = 0;
4099 
4100   /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
4101   int do_post = 0;
4102 
4103   /* From the sign of increment, see which possibilities are conceivable
4104      on this target machine.  */
4105   if (HAVE_PRE_INCREMENT && amount > 0)
4106     pre_ok = 1;
4107   if (HAVE_POST_INCREMENT && amount > 0)
4108     post_ok = 1;
4109 
4110   if (HAVE_PRE_DECREMENT && amount < 0)
4111     pre_ok = 1;
4112   if (HAVE_POST_DECREMENT && amount < 0)
4113     post_ok = 1;
4114 
4115   if (! (pre_ok || post_ok))
4116     return 0;
4117 
4118   /* It is not safe to add a side effect to a jump insn
4119      because if the incremented register is spilled and must be reloaded
4120      there would be no way to store the incremented value back in memory.  */
4121 
4122   if (GET_CODE (insn) == JUMP_INSN)
4123     return 0;
4124 
4125   use = 0;
4126   if (pre_ok)
4127     use = find_use_as_address (PATTERN (insn), reg, 0);
4128   if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
4129     {
4130       use = find_use_as_address (PATTERN (insn), reg, -amount);
4131       do_post = 1;
4132     }
4133 
4134   if (use == 0 || use == (rtx) (size_t) 1)
4135     return 0;
4136 
4137   if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4138     return 0;
4139 
4140   /* See if this combination of instruction and addressing mode exists.  */
4141   if (! validate_change (insn, &XEXP (use, 0),
4142 			 gen_rtx_fmt_e (amount > 0
4143 					? (do_post ? POST_INC : PRE_INC)
4144 					: (do_post ? POST_DEC : PRE_DEC),
4145 					Pmode, reg), 0))
4146     return 0;
4147 
4148   /* Record that this insn now has an implicit side effect on REG.  */
4149   REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4150   return 1;
4151 }
4152 
4153 #endif /* AUTO_INC_DEC */
4154 
4155 /* Find the place in the rtx X where REG is used as a memory address.
4156    Return the MEM rtx that so uses it.
4157    If PLUSCONST is nonzero, search instead for a memory address equivalent to
4158    (plus REG (const_int PLUSCONST)).
4159 
4160    If such an address does not appear, return 0.
4161    If REG appears more than once, or is used other than in such an address,
4162    return (rtx) 1.  */
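/* For instance (illustrative only): with PLUSCONST == 0, searching
   (set (reg 3) (mem:SI (reg 2))) for (reg 2) returns the MEM; if
   (reg 2) also appeared elsewhere in the pattern, the result would be
   (rtx) 1 instead.  */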
4163 
4164 rtx
4165 find_use_as_address (x, reg, plusconst)
4166      rtx x;
4167      rtx reg;
4168      HOST_WIDE_INT plusconst;
4169 {
4170   enum rtx_code code = GET_CODE (x);
4171   const char * const fmt = GET_RTX_FORMAT (code);
4172   int i;
4173   rtx value = 0;
4174   rtx tem;
4175 
4176   if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4177     return x;
4178 
4179   if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4180       && XEXP (XEXP (x, 0), 0) == reg
4181       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4182       && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4183     return x;
4184 
4185   if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4186     {
4187       /* If REG occurs inside a MEM used in a bit-field reference,
4188 	 that is unacceptable.  */
4189       if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4190 	return (rtx) (size_t) 1;
4191     }
4192 
4193   if (x == reg)
4194     return (rtx) (size_t) 1;
4195 
4196   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4197     {
4198       if (fmt[i] == 'e')
4199 	{
4200 	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4201 	  if (value == 0)
4202 	    value = tem;
4203 	  else if (tem != 0)
4204 	    return (rtx) (size_t) 1;
4205 	}
4206       else if (fmt[i] == 'E')
4207 	{
4208 	  int j;
4209 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4210 	    {
4211 	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4212 	      if (value == 0)
4213 		value = tem;
4214 	      else if (tem != 0)
4215 		return (rtx) (size_t) 1;
4216 	    }
4217 	}
4218     }
4219 
4220   return value;
4221 }
4222 
4223 /* Write the registers contained in regset R into file OUTF.
4224    This is part of making a debugging dump.  */
4225 
4226 void
4227 dump_regset (r, outf)
4228      regset r;
4229      FILE *outf;
4230 {
4231   int i;
4232   if (r == NULL)
4233     {
4234       fputs (" (nil)", outf);
4235       return;
4236     }
4237 
4238   EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4239     {
4240       fprintf (outf, " %d", i);
4241       if (i < FIRST_PSEUDO_REGISTER)
4242 	fprintf (outf, " [%s]",
4243 		 reg_names[i]);
4244     });
4245 }
4246 
4247 /* Print a human-readable representation of R on the standard error
4248    stream.  This function is designed to be used from within the
4249    debugger.  */
4250 
4251 void
4252 debug_regset (r)
4253      regset r;
4254 {
4255   dump_regset (r, stderr);
4256   putc ('\n', stderr);
4257 }
4258 
4259 /* Recompute register set/reference counts immediately prior to register
4260    allocation.
4261 
4262    This avoids problems with set/reference counts changing to/from values
4263    which have special meanings to the register allocators.
4264 
4265    Additionally, the reference counts are the primary component used by the
4266    register allocators to prioritize pseudos for allocation to hard regs.
4267    More accurate reference counts generally lead to better register allocation.
4268 
4269    F is the first insn to be scanned.
4270 
4271    LOOP_STEP denotes how much loop_depth should be incremented per
4272    loop nesting level in order to increase the ref count more for
4273    references in a loop.
4274 
4275    It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4276    possibly other information which is used by the register allocators.  */
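/* In this implementation both F and LOOP_STEP are unused; the counts
   are simply recomputed by rerunning local life analysis with
   PROP_REG_INFO.  */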
4277 
4278 void
4279 recompute_reg_usage (f, loop_step)
4280      rtx f ATTRIBUTE_UNUSED;
4281      int loop_step ATTRIBUTE_UNUSED;
4282 {
4283   allocate_reg_life_data ();
4284   update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
4285 }
4286 
4287 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
4288    blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
4289    of the number of registers that died.  */
4290 
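/* Note that a REG_DEAD note for a multi-word hard register counts as
   HARD_REGNO_NREGS registers, and that REG_UNUSED notes are removed
   when KILL is nonzero but are never counted.  */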
4291 int
4292 count_or_remove_death_notes (blocks, kill)
4293      sbitmap blocks;
4294      int kill;
4295 {
4296   int count = 0;
4297   basic_block bb;
4298 
4299   FOR_EACH_BB_REVERSE (bb)
4300     {
4301       rtx insn;
4302 
4303       if (blocks && ! TEST_BIT (blocks, bb->index))
4304 	continue;
4305 
4306       for (insn = bb->head;; insn = NEXT_INSN (insn))
4307 	{
4308 	  if (INSN_P (insn))
4309 	    {
4310 	      rtx *pprev = &REG_NOTES (insn);
4311 	      rtx link = *pprev;
4312 
4313 	      while (link)
4314 		{
4315 		  switch (REG_NOTE_KIND (link))
4316 		    {
4317 		    case REG_DEAD:
4318 		      if (GET_CODE (XEXP (link, 0)) == REG)
4319 			{
4320 			  rtx reg = XEXP (link, 0);
4321 			  int n;
4322 
4323 			  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
4324 			    n = 1;
4325 			  else
4326 			    n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
4327 			  count += n;
4328 			}
4329 		      /* Fall through.  */
4330 
4331 		    case REG_UNUSED:
4332 		      if (kill)
4333 			{
4334 			  rtx next = XEXP (link, 1);
4335 			  free_EXPR_LIST_node (link);
4336 			  *pprev = link = next;
4337 			  break;
4338 			}
4339 		      /* Fall through.  */
4340 
4341 		    default:
4342 		      pprev = &XEXP (link, 1);
4343 		      link = *pprev;
4344 		      break;
4345 		    }
4346 		}
4347 	    }
4348 
4349 	  if (insn == bb->end)
4350 	    break;
4351 	}
4352     }
4353 
4354   return count;
4355 }
4356 /* Clear the LOG_LINKS fields of insns in the selected blocks, or in the
4357    whole chain if BLOCKS is NULL.  */
4358 
4359 static void
4360 clear_log_links (blocks)
4361      sbitmap blocks;
4362 {
4363   rtx insn;
4364   int i;
4365 
4366   if (!blocks)
4367     {
4368       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4369 	if (INSN_P (insn))
4370 	  free_INSN_LIST_list (&LOG_LINKS (insn));
4371     }
4372   else
4373     EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4374       {
4375 	basic_block bb = BASIC_BLOCK (i);
4376 
4377 	for (insn = bb->head; insn != NEXT_INSN (bb->end);
4378 	     insn = NEXT_INSN (insn))
4379 	  if (INSN_P (insn))
4380 	    free_INSN_LIST_list (&LOG_LINKS (insn));
4381       });
4382 }
4383 
4384 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
4385    correspond to the hard registers, if any, set in that map.  This
4386    could be done far more efficiently by having all sorts of special-cases
4387    with moving single words, but probably isn't worth the trouble.  */
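/* Note that the early return below relies on the bitmap iterator
   visiting bits in increasing order, so the walk can stop as soon as
   it reaches the first pseudo register number.  */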
4388 
4389 void
4390 reg_set_to_hard_reg_set (to, from)
4391      HARD_REG_SET *to;
4392      bitmap from;
4393 {
4394   int i;
4395 
4396   EXECUTE_IF_SET_IN_BITMAP
4397     (from, 0, i,
4398      {
4399        if (i >= FIRST_PSEUDO_REGISTER)
4400 	 return;
4401        SET_HARD_REG_BIT (*to, i);
4402      });
4403 }
4404