/* Dataflow support routines.
   Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
                                    mhayes@redhat.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.


OVERVIEW:

This file provides some dataflow routines for computing reaching defs,
upward exposed uses, live variables, def-use chains, and use-def
chains.  The global dataflow is performed using simple iterative
methods with a worklist and could be sped up by ordering the blocks
with a depth first search order.

A `struct ref' data structure (ref) is allocated for every register
reference (def or use) and this records the insn and bb the ref is
found within.  The refs are linked together in chains of uses and defs
for each insn and for each register.  Each ref also has a chain field
that links all the use refs for a def or all the def refs for a use.
This is used to create use-def or def-use chains.


USAGE:

Here's an example of using the dataflow routines.

      struct df *df;

      df = df_init ();

      df_analyse (df, 0, DF_ALL);

      df_dump (df, DF_ALL, stderr);

      df_finish (df);


df_init simply creates a poor man's object (df) that needs to be
passed to all the dataflow routines.  df_finish destroys this
object and frees up any allocated memory.

df_analyse performs the following:

1. Records defs and uses by scanning the insns in each basic block
   or by scanning the insns queued by df_insn_modify.
2. Links defs and uses into insn-def and insn-use chains.
3. Links defs and uses into reg-def and reg-use chains.
4. Assigns LUIDs to each insn (for modified blocks).
5. Calculates local reaching definitions.
6. Calculates global reaching definitions.
7. Creates use-def chains.
8. Calculates local reaching uses (upwards exposed uses).
9. Calculates global reaching uses.
10. Creates def-use chains.
11. Calculates local live registers.
12. Calculates global live registers.
13. Calculates register lifetimes and determines local registers.


PHILOSOPHY:

Note that the dataflow information is not updated for every newly
deleted or created insn.  If the dataflow information requires
updating then all the changed, new, or deleted insns need to be
marked with df_insn_modify (or df_insns_modify) either directly or
indirectly (say through calling df_insn_delete).  df_insn_modify
marks all the modified insns to get processed the next time df_analyse
is called.

Beware that tinkering with insns may invalidate the dataflow information.
The philosophy behind these routines is that once the dataflow
information has been gathered, the user should store what they require
before they tinker with any insn.  Once a reg is replaced, for example,
then the reg-def/reg-use chains will point to the wrong place.  Once a
whole lot of changes have been made, df_analyse can be called again
to update the dataflow information.  Currently, this is not very smart
with regard to propagating changes to the dataflow so it should not
be called very often.
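
For example, here is a sketch of that workflow (assuming an existing df
object and a pass that has just emitted NEW_INSN into block BB; the
variable names are only illustrative):

      new_insn = emit_insn_before (pattern, insn);

      df_insn_modify (df, bb, new_insn);

      ... more insn tinkering ...

      df_analyse (df, 0, DF_ALL);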


DATA STRUCTURES:

The basic object is a REF (reference) and this may either be a DEF
(definition) or a USE of a register.

These are linked into a variety of lists: namely reg-def, reg-use,
insn-def, insn-use, def-use, and use-def lists.  For example,
the reg-def lists contain all the refs that define a given register
while the insn-use lists contain all the refs used by an insn.

Note that the reg-def and reg-use chains are generally short (except for the
hard registers) and thus it is much faster to search these chains
rather than searching the def or use bitmaps.
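
For example, to walk every def of register REGNO using these chains
(a sketch; the chain heads live in df->regs and each link carries a ref):

      struct df_link *link;

      for (link = df->regs[regno].defs; link; link = link->next)
	{
	  struct ref *def = link->ref;

	  ... inspect DEF, e.g. via DF_REF_INSN (def) ...
	}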

If the insns are in SSA form then the reg-def and use-def lists
should only contain the single defining ref.

TODO:

1) Incremental dataflow analysis.

Note that if a loop invariant insn is hoisted (or sunk), we do not
need to change the def-use or use-def chains.  All we have to do is to
change the bb field for all the associated defs and uses and to
renumber the LUIDs for the original and new basic blocks of the insn.

When shadowing loop mems we create new uses and defs for new pseudos
so we do not affect the existing dataflow information.

My current strategy is to queue up all modified, created, or deleted
insns so when df_analyse is called we can easily determine all the new
or deleted refs.  Currently the global dataflow information is
recomputed from scratch but this could be propagated more efficiently.

2) Improved global data flow computation using depth first search.

3) Reduced memory requirements.

We could operate a pool of ref structures.  When a ref is deleted it
gets returned to the pool (say by linking on to a chain of free refs).
This will require a pair of bitmaps for defs and uses so that we can
tell which ones have been changed.  Alternatively, we could
periodically squeeze the def and use tables and associated bitmaps and
renumber the def and use ids.

4) Ordering of reg-def and reg-use lists.

Should the first entry in the def list be the first def (within a BB)?
Similarly, should the first entry in the use list be the last use
(within a BB)?

5) Working with a sub-CFG.

Often the whole CFG does not need to be analysed; for example,
when optimising a loop, only certain registers are of interest.
Perhaps there should be a bitmap argument to df_analyse to specify
which registers should be analysed?  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "obstack.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "df.h"
#include "fibheap.h"

#define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE)	\
  do							\
    {							\
      unsigned int node_;				\
      EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_,	\
      {(BB) = BASIC_BLOCK (node_); CODE;});		\
    }							\
  while (0)
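
/* For example (a sketch mirroring the uses later in this file), to
   visit every basic block whose index is set in a bitmap:

      basic_block bb;

      FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
	{
	  df_bb_refs_record (df, bb);
	});  */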

static struct obstack df_ref_obstack;
static struct df *ddf;

static void df_reg_table_realloc PARAMS((struct df *, int));
#if 0
static void df_def_table_realloc PARAMS((struct df *, int));
#endif
static void df_insn_table_realloc PARAMS((struct df *, unsigned int));
static void df_bitmaps_alloc PARAMS((struct df *, int));
static void df_bitmaps_free PARAMS((struct df *, int));
static void df_free PARAMS((struct df *));
static void df_alloc PARAMS((struct df *, int));

static rtx df_reg_clobber_gen PARAMS((unsigned int));
static rtx df_reg_use_gen PARAMS((unsigned int));

static inline struct df_link *df_link_create PARAMS((struct ref *,
						     struct df_link *));
static struct df_link *df_ref_unlink PARAMS((struct df_link **, struct ref *));
static void df_def_unlink PARAMS((struct df *, struct ref *));
static void df_use_unlink PARAMS((struct df *, struct ref *));
static void df_insn_refs_unlink PARAMS ((struct df *, basic_block, rtx));
#if 0
static void df_bb_refs_unlink PARAMS ((struct df *, basic_block));
static void df_refs_unlink PARAMS ((struct df *, bitmap));
#endif

static struct ref *df_ref_create PARAMS((struct df *,
					 rtx, rtx *, rtx,
					 enum df_ref_type, enum df_ref_flags));
static void df_ref_record_1 PARAMS((struct df *, rtx, rtx *,
				    rtx, enum df_ref_type,
				    enum df_ref_flags));
static void df_ref_record PARAMS((struct df *, rtx, rtx *,
				  rtx, enum df_ref_type,
				  enum df_ref_flags));
static void df_def_record_1 PARAMS((struct df *, rtx, basic_block, rtx));
static void df_defs_record PARAMS((struct df *, rtx, basic_block, rtx));
static void df_uses_record PARAMS((struct df *, rtx *,
				   enum df_ref_type, basic_block, rtx,
				   enum df_ref_flags));
static void df_insn_refs_record PARAMS((struct df *, basic_block, rtx));
static void df_bb_refs_record PARAMS((struct df *, basic_block));
static void df_refs_record PARAMS((struct df *, bitmap));

static void df_bb_reg_def_chain_create PARAMS((struct df *, basic_block));
static void df_reg_def_chain_create PARAMS((struct df *, bitmap));
static void df_bb_reg_use_chain_create PARAMS((struct df *, basic_block));
static void df_reg_use_chain_create PARAMS((struct df *, bitmap));
static void df_bb_du_chain_create PARAMS((struct df *, basic_block, bitmap));
static void df_du_chain_create PARAMS((struct df *, bitmap));
static void df_bb_ud_chain_create PARAMS((struct df *, basic_block));
static void df_ud_chain_create PARAMS((struct df *, bitmap));
static void df_bb_rd_local_compute PARAMS((struct df *, basic_block));
static void df_rd_local_compute PARAMS((struct df *, bitmap));
static void df_bb_ru_local_compute PARAMS((struct df *, basic_block));
static void df_ru_local_compute PARAMS((struct df *, bitmap));
static void df_bb_lr_local_compute PARAMS((struct df *, basic_block));
static void df_lr_local_compute PARAMS((struct df *, bitmap));
static void df_bb_reg_info_compute PARAMS((struct df *, basic_block, bitmap));
static void df_reg_info_compute PARAMS((struct df *, bitmap));

static int df_bb_luids_set PARAMS((struct df *df, basic_block));
static int df_luids_set PARAMS((struct df *df, bitmap));

static int df_modified_p PARAMS ((struct df *, bitmap));
static int df_refs_queue PARAMS ((struct df *));
static int df_refs_process PARAMS ((struct df *));
static int df_bb_refs_update PARAMS ((struct df *, basic_block));
static int df_refs_update PARAMS ((struct df *));
static void df_analyse_1 PARAMS((struct df *, bitmap, int, int));

static void df_insns_modify PARAMS((struct df *, basic_block,
				    rtx, rtx));
static int df_rtx_mem_replace PARAMS ((rtx *, void *));
static int df_rtx_reg_replace PARAMS ((rtx *, void *));
void df_refs_reg_replace PARAMS ((struct df *, bitmap,
					 struct df_link *, rtx, rtx));

static int df_def_dominates_all_uses_p PARAMS((struct df *, struct ref *def));
static int df_def_dominates_uses_p PARAMS((struct df *,
					   struct ref *def, bitmap));
static struct ref *df_bb_regno_last_use_find PARAMS((struct df *, basic_block,
						     unsigned int));
static struct ref *df_bb_regno_first_def_find PARAMS((struct df *, basic_block,
						      unsigned int));
static struct ref *df_bb_insn_regno_last_use_find PARAMS((struct df *,
							  basic_block,
							  rtx, unsigned int));
static struct ref *df_bb_insn_regno_first_def_find PARAMS((struct df *,
							   basic_block,
							   rtx, unsigned int));

static void df_chain_dump PARAMS((struct df_link *, FILE *file));
static void df_chain_dump_regno PARAMS((struct df_link *, FILE *file));
static void df_regno_debug PARAMS ((struct df *, unsigned int, FILE *));
static void df_ref_debug PARAMS ((struct df *, struct ref *, FILE *));
static void df_rd_transfer_function PARAMS ((int, int *, bitmap, bitmap,
					     bitmap, bitmap, void *));
static void df_ru_transfer_function PARAMS ((int, int *, bitmap, bitmap,
					     bitmap, bitmap, void *));
static void df_lr_transfer_function PARAMS ((int, int *, bitmap, bitmap,
					     bitmap, bitmap, void *));
static void hybrid_search_bitmap PARAMS ((basic_block, bitmap *, bitmap *,
					  bitmap *, bitmap *, enum df_flow_dir,
					  enum df_confluence_op,
					  transfer_function_bitmap,
					  sbitmap, sbitmap, void *));
static void hybrid_search_sbitmap PARAMS ((basic_block, sbitmap *, sbitmap *,
					   sbitmap *, sbitmap *, enum df_flow_dir,
					   enum df_confluence_op,
					   transfer_function_sbitmap,
					   sbitmap, sbitmap, void *));
static inline bool read_modify_subreg_p PARAMS ((rtx));


/* Local memory allocation/deallocation routines.  */


/* Increase the insn info table to have space for at least SIZE + 1
   elements.  */
static void
df_insn_table_realloc (df, size)
     struct df *df;
     unsigned int size;
{
  size++;
  if (size <= df->insn_size)
    return;

  /* Make the table a little larger than requested, so we don't need
     to enlarge it so often.  */
  size += df->insn_size / 4;

  df->insns = (struct insn_info *)
    xrealloc (df->insns, size * sizeof (struct insn_info));

  memset (df->insns + df->insn_size, 0,
	  (size - df->insn_size) * sizeof (struct insn_info));

  df->insn_size = size;

  if (! df->insns_modified)
    {
      df->insns_modified = BITMAP_XMALLOC ();
      bitmap_zero (df->insns_modified);
    }
}


/* Increase the reg info table by SIZE more elements.  */
static void
df_reg_table_realloc (df, size)
     struct df *df;
     int size;
{
  /* Make table 25 percent larger by default.  */
  if (! size)
    size = df->reg_size / 4;

  size += df->reg_size;
  if (size < max_reg_num ())
    size = max_reg_num ();

  df->regs = (struct reg_info *)
    xrealloc (df->regs, size * sizeof (struct reg_info));

  /* Zero the new entries.  */
  memset (df->regs + df->reg_size, 0,
	  (size - df->reg_size) * sizeof (struct reg_info));

  df->reg_size = size;
}


#if 0
/* Not currently used.  */
static void
df_def_table_realloc (df, size)
     struct df *df;
     int size;
{
  int i;
  struct ref *refs;

  /* Make table 25 percent larger by default.  */
  if (! size)
    size = df->def_size / 4;

  df->def_size += size;
  df->defs = xrealloc (df->defs,
		       df->def_size * sizeof (*df->defs));

  /* Allocate a new block of memory and link into list of blocks
     that will need to be freed later.  */

  refs = xmalloc (size * sizeof (*refs));

  /* Link all the new refs together, overloading the chain field.  */
  for (i = 0; i < size - 1; i++)
    refs[i].chain = (struct df_link *) (refs + i + 1);
  refs[size - 1].chain = 0;
}
#endif


/* Allocate bitmaps for each basic block.  */
static void
df_bitmaps_alloc (df, flags)
     struct df *df;
     int flags;
{
  int dflags = 0;
  basic_block bb;

  /* Free the bitmaps if they need resizing.  */
  if ((flags & DF_LR) && df->n_regs < (unsigned int) max_reg_num ())
    dflags |= DF_LR | DF_RU;
  if ((flags & DF_RU) && df->n_uses < df->use_id)
    dflags |= DF_RU;
  if ((flags & DF_RD) && df->n_defs < df->def_id)
    dflags |= DF_RD;

  if (dflags)
    df_bitmaps_free (df, dflags);

  df->n_defs = df->def_id;
  df->n_uses = df->use_id;

  FOR_EACH_BB (bb)
    {
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      if (flags & DF_RD && ! bb_info->rd_in)
	{
	  /* Allocate bitmaps for reaching definitions.  */
	  bb_info->rd_kill = BITMAP_XMALLOC ();
	  bitmap_zero (bb_info->rd_kill);
	  bb_info->rd_gen = BITMAP_XMALLOC ();
	  bitmap_zero (bb_info->rd_gen);
	  bb_info->rd_in = BITMAP_XMALLOC ();
	  bb_info->rd_out = BITMAP_XMALLOC ();
	  bb_info->rd_valid = 0;
	}

      if (flags & DF_RU && ! bb_info->ru_in)
	{
	  /* Allocate bitmaps for upward exposed uses.  */
	  bb_info->ru_kill = BITMAP_XMALLOC ();
	  bitmap_zero (bb_info->ru_kill);
	  /* Note the lack of symmetry.  */
	  bb_info->ru_gen = BITMAP_XMALLOC ();
	  bitmap_zero (bb_info->ru_gen);
	  bb_info->ru_in = BITMAP_XMALLOC ();
	  bb_info->ru_out = BITMAP_XMALLOC ();
	  bb_info->ru_valid = 0;
	}

      if (flags & DF_LR && ! bb_info->lr_in)
	{
	  /* Allocate bitmaps for live variables.  */
	  bb_info->lr_def = BITMAP_XMALLOC ();
	  bitmap_zero (bb_info->lr_def);
	  bb_info->lr_use = BITMAP_XMALLOC ();
	  bitmap_zero (bb_info->lr_use);
	  bb_info->lr_in = BITMAP_XMALLOC ();
	  bb_info->lr_out = BITMAP_XMALLOC ();
	  bb_info->lr_valid = 0;
	}
    }
}


/* Free bitmaps for each basic block.  */
static void
df_bitmaps_free (df, flags)
     struct df *df ATTRIBUTE_UNUSED;
     int flags;
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      if (!bb_info)
	continue;

      if ((flags & DF_RD) && bb_info->rd_in)
	{
	  /* Free bitmaps for reaching definitions.  */
	  BITMAP_XFREE (bb_info->rd_kill);
	  bb_info->rd_kill = NULL;
	  BITMAP_XFREE (bb_info->rd_gen);
	  bb_info->rd_gen = NULL;
	  BITMAP_XFREE (bb_info->rd_in);
	  bb_info->rd_in = NULL;
	  BITMAP_XFREE (bb_info->rd_out);
	  bb_info->rd_out = NULL;
	}

      if ((flags & DF_RU) && bb_info->ru_in)
	{
	  /* Free bitmaps for upward exposed uses.  */
	  BITMAP_XFREE (bb_info->ru_kill);
	  bb_info->ru_kill = NULL;
	  BITMAP_XFREE (bb_info->ru_gen);
	  bb_info->ru_gen = NULL;
	  BITMAP_XFREE (bb_info->ru_in);
	  bb_info->ru_in = NULL;
	  BITMAP_XFREE (bb_info->ru_out);
	  bb_info->ru_out = NULL;
	}

      if ((flags & DF_LR) && bb_info->lr_in)
	{
	  /* Free bitmaps for live variables.  */
	  BITMAP_XFREE (bb_info->lr_def);
	  bb_info->lr_def = NULL;
	  BITMAP_XFREE (bb_info->lr_use);
	  bb_info->lr_use = NULL;
	  BITMAP_XFREE (bb_info->lr_in);
	  bb_info->lr_in = NULL;
	  BITMAP_XFREE (bb_info->lr_out);
	  bb_info->lr_out = NULL;
	}
    }
  df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
}


/* Allocate and initialize dataflow memory.  */
static void
df_alloc (df, n_regs)
     struct df *df;
     int n_regs;
{
  int n_insns;
  basic_block bb;

  gcc_obstack_init (&df_ref_obstack);

  /* Perhaps we should use LUIDs to save memory for the insn_refs
     table.  This is only a small saving; a few pointers.  */
  n_insns = get_max_uid () + 1;

  df->def_id = 0;
  df->n_defs = 0;
  /* Approximate number of defs by number of insns.  */
  df->def_size = n_insns;
  df->defs = xmalloc (df->def_size * sizeof (*df->defs));

  df->use_id = 0;
  df->n_uses = 0;
  /* Approximate number of uses by twice number of insns.  */
  df->use_size = n_insns * 2;
  df->uses = xmalloc (df->use_size * sizeof (*df->uses));

  df->n_regs = n_regs;
  df->n_bbs = last_basic_block;

  /* Allocate temporary working array used during local dataflow analysis.  */
  df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));

  df_insn_table_realloc (df, n_insns);

  df_reg_table_realloc (df, df->n_regs);

  df->bbs_modified = BITMAP_XMALLOC ();
  bitmap_zero (df->bbs_modified);

  df->flags = 0;

  df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));

  df->all_blocks = BITMAP_XMALLOC ();
  FOR_EACH_BB (bb)
    bitmap_set_bit (df->all_blocks, bb->index);
}


/* Free all the dataflow info.  */
static void
df_free (df)
     struct df *df;
{
  df_bitmaps_free (df, DF_ALL);

  if (df->bbs)
    free (df->bbs);
  df->bbs = 0;

  if (df->insns)
    free (df->insns);
  df->insns = 0;
  df->insn_size = 0;

  if (df->defs)
    free (df->defs);
  df->defs = 0;
  df->def_size = 0;
  df->def_id = 0;

  if (df->uses)
    free (df->uses);
  df->uses = 0;
  df->use_size = 0;
  df->use_id = 0;

  if (df->regs)
    free (df->regs);
  df->regs = 0;
  df->reg_size = 0;

  if (df->bbs_modified)
    BITMAP_XFREE (df->bbs_modified);
  df->bbs_modified = 0;

  if (df->insns_modified)
    BITMAP_XFREE (df->insns_modified);
  df->insns_modified = 0;

  BITMAP_XFREE (df->all_blocks);
  df->all_blocks = 0;

  obstack_free (&df_ref_obstack, NULL);
}

/* Local miscellaneous routines.  */

/* Return a USE for register REGNO.  */
static rtx df_reg_use_gen (regno)
     unsigned int regno;
{
  rtx reg;
  rtx use;

  reg = regno_reg_rtx[regno];

  use = gen_rtx_USE (GET_MODE (reg), reg);
  return use;
}


/* Return a CLOBBER for register REGNO.  */
static rtx df_reg_clobber_gen (regno)
     unsigned int regno;
{
  rtx reg;
  rtx use;

  reg = regno_reg_rtx[regno];

  use = gen_rtx_CLOBBER (GET_MODE (reg), reg);
  return use;
}

/* Local chain manipulation routines.  */

/* Create a link in a def-use or use-def chain.  */
static inline struct df_link *
df_link_create (ref, next)
     struct ref *ref;
     struct df_link *next;
{
  struct df_link *link;

  link = (struct df_link *) obstack_alloc (&df_ref_obstack,
					   sizeof (*link));
  link->next = next;
  link->ref = ref;
  return link;
}


/* Remove REF from the chain pointed to by PHEAD.  */
static struct df_link *
df_ref_unlink (phead, ref)
     struct df_link **phead;
     struct ref *ref;
{
  struct df_link *link = *phead;

  if (link)
    {
      if (! link->next)
	{
	  /* Only a single ref.  It must be the one we want.
	     If not, the def-use and use-def chains are likely to
	     be inconsistent.  */
	  if (link->ref != ref)
	    abort ();
	  /* Now have an empty chain.  */
	  *phead = NULL;
	}
      else
	{
	  /* Multiple refs.  One of them must be us.  */
	  if (link->ref == ref)
	    *phead = link->next;
	  else
	    {
	      /* Follow chain.  */
	      for (; link->next; link = link->next)
		{
		  if (link->next->ref == ref)
		    {
		      /* Unlink from list.  */
		      link->next = link->next->next;
		      return link->next;
		    }
		}
	    }
	}
    }
  return link;
}


/* Unlink REF from all def-use/use-def chains, etc.  */
int
df_ref_remove (df, ref)
     struct df *df;
     struct ref *ref;
{
  if (DF_REF_REG_DEF_P (ref))
    {
      df_def_unlink (df, ref);
      df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
    }
  else
    {
      df_use_unlink (df, ref);
      df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
    }
  return 1;
}


/* Unlink DEF from use-def and reg-def chains.  */
static void
df_def_unlink (df, def)
     struct df *df ATTRIBUTE_UNUSED;
     struct ref *def;
{
  struct df_link *du_link;
  unsigned int dregno = DF_REF_REGNO (def);

  /* Follow def-use chain to find all the uses of this def.  */
  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
    {
      struct ref *use = du_link->ref;

      /* Unlink this def from the use-def chain.  */
      df_ref_unlink (&DF_REF_CHAIN (use), def);
    }
  DF_REF_CHAIN (def) = 0;

  /* Unlink def from reg-def chain.  */
  df_ref_unlink (&df->regs[dregno].defs, def);

  df->defs[DF_REF_ID (def)] = 0;
}


/* Unlink USE from def-use and reg-use chains.  */
static void
df_use_unlink (df, use)
     struct df *df ATTRIBUTE_UNUSED;
     struct ref *use;
{
  struct df_link *ud_link;
  unsigned int uregno = DF_REF_REGNO (use);

  /* Follow use-def chain to find all the defs of this use.  */
  for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
    {
      struct ref *def = ud_link->ref;

      /* Unlink this use from the def-use chain.  */
      df_ref_unlink (&DF_REF_CHAIN (def), use);
    }
  DF_REF_CHAIN (use) = 0;

  /* Unlink use from reg-use chain.  */
  df_ref_unlink (&df->regs[uregno].uses, use);

  df->uses[DF_REF_ID (use)] = 0;
}

/* Local routines for recording refs.  */


/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */
static struct ref *
df_ref_create (df, reg, loc, insn, ref_type, ref_flags)
     struct df *df;
     rtx reg;
     rtx *loc;
     rtx insn;
     enum df_ref_type ref_type;
     enum df_ref_flags ref_flags;
{
  struct ref *this_ref;
  unsigned int uid;

  this_ref = (struct ref *) obstack_alloc (&df_ref_obstack,
					   sizeof (*this_ref));
  DF_REF_REG (this_ref) = reg;
  DF_REF_LOC (this_ref) = loc;
  DF_REF_INSN (this_ref) = insn;
  DF_REF_CHAIN (this_ref) = 0;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_FLAGS (this_ref) = ref_flags;
  uid = INSN_UID (insn);

  if (ref_type == DF_REF_REG_DEF)
    {
      if (df->def_id >= df->def_size)
	{
	  /* Make table 25 percent larger.  */
	  df->def_size += (df->def_size / 4);
	  df->defs = xrealloc (df->defs,
			       df->def_size * sizeof (*df->defs));
	}
      DF_REF_ID (this_ref) = df->def_id;
      df->defs[df->def_id++] = this_ref;
    }
  else
    {
      if (df->use_id >= df->use_size)
	{
	  /* Make table 25 percent larger.  */
	  df->use_size += (df->use_size / 4);
	  df->uses = xrealloc (df->uses,
			       df->use_size * sizeof (*df->uses));
	}
      DF_REF_ID (this_ref) = df->use_id;
      df->uses[df->use_id++] = this_ref;
    }
  return this_ref;
}


/* Create a new reference of type DF_REF_TYPE for a single register REG,
   used inside the LOC rtx of INSN.  */
static void
df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags)
     struct df *df;
     rtx reg;
     rtx *loc;
     rtx insn;
     enum df_ref_type ref_type;
     enum df_ref_flags ref_flags;
{
  df_ref_create (df, reg, loc, insn, ref_type, ref_flags);
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  */
static void
df_ref_record (df, reg, loc, insn, ref_type, ref_flags)
     struct df *df;
     rtx reg;
     rtx *loc;
     rtx insn;
     enum df_ref_type ref_type;
     enum df_ref_flags ref_flags;
{
  unsigned int regno;

  if (GET_CODE (reg) != REG && GET_CODE (reg) != SUBREG)
    abort ();

  /* For the reg allocator we are interested in some SUBREG rtx's, but not
     all.  Notably only those representing a word extraction from a multi-word
     reg.  As written in the documentation, those should have the form
     (subreg:SI (reg:M A) N), with size(SImode) < size(Mmode).
     XXX Is that true?  We could also use the global word_mode variable.  */
  if (GET_CODE (reg) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
	  || GET_MODE_SIZE (GET_MODE (reg))
	       >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
    {
      loc = &SUBREG_REG (reg);
      reg = *loc;
    }

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int i;
      int endregno;

      if (! (df->flags & DF_HARD_REGS))
	return;

      /* GET_MODE (reg) is correct here.  We don't want to go into a SUBREG
         for the mode, because we only want to add references to regs that
	 are really referenced.  E.g. a (subreg:SI (reg:DI 0) 0) does _not_
	 reference the whole reg 0 in DI mode (which would also include
	 reg 1, at least, if 0 and 1 are SImode registers).  */
      endregno = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      if (GET_CODE (reg) == SUBREG)
        regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
				      SUBREG_BYTE (reg), GET_MODE (reg));
      endregno += regno;

      for (i = regno; i < endregno; i++)
	df_ref_record_1 (df, regno_reg_rtx[i],
			 loc, insn, ref_type, ref_flags);
    }
  else
    {
      df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags);
    }
}

/* Writes to paradoxical subregs, or to subregs that are too narrow,
   are read-modify-write.  */

static inline bool
read_modify_subreg_p (x)
     rtx x;
{
  unsigned int isize, osize;
  if (GET_CODE (x) != SUBREG)
    return false;
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  osize = GET_MODE_SIZE (GET_MODE (x));
  if (isize <= osize)
    return true;
  if (isize <= UNITS_PER_WORD)
    return false;
  if (osize >= UNITS_PER_WORD)
    return false;
  return true;
}
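
/* For example (illustrative, assuming a 32-bit word target): writing to
   (subreg:DI (reg:SI ...) 0) is paradoxical (ISIZE <= OSIZE) and writing
   to (subreg:QI (reg:DI ...) 0) is too narrow; both are treated as
   read-modify-write.  Writing to (subreg:SI (reg:DI ...) 0) replaces a
   whole word and is not.  */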

/* Process all the registers defined in the rtx, X.  */
static void
df_def_record_1 (df, x, bb, insn)
     struct df *df;
     rtx x;
     basic_block bb;
     rtx insn;
{
  rtx *loc = &SET_DEST (x);
  rtx dst = *loc;
  enum df_ref_flags flags = 0;

  /* Some targets place small structures in registers for
     return values of functions.  */
  if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
    {
      int i;

      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	df_def_record_1 (df, XVECEXP (dst, 0, i), bb, insn);
      return;
    }

#ifdef CLASS_CANNOT_CHANGE_MODE
  if (GET_CODE (dst) == SUBREG
      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (dst)),
				     GET_MODE (dst)))
    flags |= DF_REF_MODE_CHANGE;
#endif

  /* Maybe we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
	 || GET_CODE (dst) == ZERO_EXTRACT
	 || GET_CODE (dst) == SIGN_EXTRACT
	 || read_modify_subreg_p (dst))
    {
      /* A strict low part always contains a SUBREG, but we don't want to
	 make it appear outside, as the whole register is always
	 considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
	{
	  loc = &XEXP (dst, 0);
	  dst = *loc;
	}
#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (dst) == SUBREG
	  && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (dst)),
				         GET_MODE (dst)))
        flags |= DF_REF_MODE_CHANGE;
#endif
      loc = &XEXP (dst, 0);
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  if (GET_CODE (dst) == REG
      || (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG))
    df_ref_record (df, dst, loc, insn, DF_REF_REG_DEF, flags);
}


/* Process all the registers defined in the pattern rtx, X.  */
static void
df_defs_record (df, x, bb, insn)
     struct df *df;
     rtx x;
     basic_block bb;
     rtx insn;
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
      df_def_record_1 (df, x, bb, insn);
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  code = GET_CODE (XVECEXP (x, 0, i));
	  if (code == SET || code == CLOBBER)
	    df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
	}
    }
}


/* Process all the registers used in the rtx at address LOC.  */
static void
df_uses_record (df, loc, ref_type, bb, insn, flags)
     struct df *df;
     rtx *loc;
     enum df_ref_type ref_type;
     basic_block bb;
     rtx insn;
     enum df_ref_flags flags;
{
  RTX_CODE code;
  rtx x;
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	df_uses_record (df, &XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def, so ignore it.  */
      return;

    case MEM:
      df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn, flags);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */

      /* If the SUBREG is not of a register, don't optimize.  */
      if (GET_CODE (SUBREG_REG (x)) != REG)
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (df, loc, ref_type, bb, insn, flags);
	  return;
	}
#ifdef CLASS_CANNOT_CHANGE_MODE
      if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
				      GET_MODE (SUBREG_REG (x))))
        flags |= DF_REF_MODE_CHANGE;
#endif

      /* ... Fall through ...  */

    case REG:
      /* We have seen a register (or subreg) being used in a context
	 other than being set.  */
      df_ref_record (df, x, loc, insn, ref_type, flags);
      return;

    case SET:
      {
	rtx dst = SET_DEST (x);

	df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);

	switch (GET_CODE (dst))
	  {
	    enum df_ref_flags use_flags;
	    case SUBREG:
	      if (read_modify_subreg_p (dst))
		{
		  use_flags = DF_REF_READ_WRITE;
#ifdef CLASS_CANNOT_CHANGE_MODE
		  if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
						  GET_MODE (SUBREG_REG (dst))))
		    use_flags |= DF_REF_MODE_CHANGE;
#endif
		  df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
				  insn, use_flags);
		  break;
		}
	      /* ... FALLTHRU ...  */
	    case REG:
	    case PARALLEL:
	    case PC:
	    case CC0:
		break;
	    case MEM:
	      df_uses_record (df, &XEXP (dst, 0),
			      DF_REF_REG_MEM_STORE,
			      bb, insn, 0);
	      break;
	    case STRICT_LOW_PART:
	      /* A strict_low_part uses the whole reg, not only the subreg.  */
	      dst = XEXP (dst, 0);
	      if (GET_CODE (dst) != SUBREG)
		abort ();
	      use_flags = DF_REF_READ_WRITE;
#ifdef CLASS_CANNOT_CHANGE_MODE
	      if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
					      GET_MODE (SUBREG_REG (dst))))
		use_flags |= DF_REF_MODE_CHANGE;
#endif
	      df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
			     insn, use_flags);
	      break;
	    case ZERO_EXTRACT:
	    case SIGN_EXTRACT:
	      df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn,
			      DF_REF_READ_WRITE);
	      df_uses_record (df, &XEXP (dst, 1), DF_REF_REG_USE, bb, insn, 0);
	      df_uses_record (df, &XEXP (dst, 2), DF_REF_REG_USE, bb, insn, 0);
	      dst = XEXP (dst, 0);
	      break;
	    default:
	      abort ();
	  }
	return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be considered to use
	   and clobber all hard registers, all pseudo-registers and all of
	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu rounding
	   mode.  An insn should not be moved across this even if it only uses
	   pseudo-regs because it might give an incorrectly rounded result.

	   For now, just mark any regs we can find in ASM_OPERANDS as
	   used.  */

	/* For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We cannot just fall through here since then we would
	   be confused by the ASM_INPUT rtx inside ASM_OPERANDS, which does
	   not indicate a traditional asm, unlike its normal usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn, 0);
	    return;
	  }
	break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), insn, DF_REF_REG_DEF, DF_REF_READ_WRITE);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (df, &XEXP (x, i), ref_type, bb, insn, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (df, &XVECEXP (x, i, j), ref_type,
			      bb, insn, flags);
	  }
      }
  }
}


/* Record all the refs within INSN of basic block BB.  */
static void
df_insn_refs_record (df, bb, insn)
     struct df *df;
     basic_block bb;
     rtx insn;
{
  int i;

  if (INSN_P (insn))
    {
      rtx note;

      /* Record register defs.  */
      df_defs_record (df, PATTERN (insn), bb, insn);

      if (df->flags & DF_EQUIV_NOTES)
	for (note = REG_NOTES (insn); note;
	     note = XEXP (note, 1))
	  {
	    switch (REG_NOTE_KIND (note))
	      {
	      case REG_EQUIV:
	      case REG_EQUAL:
		df_uses_record (df, &XEXP (note, 0), DF_REF_REG_USE,
				bb, insn, 0);
	      default:
		break;
	      }
	  }

      if (GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;
	  rtx x;

	  /* Record the registers used to pass arguments.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
	       note = XEXP (note, 1))
	    {
	      if (GET_CODE (XEXP (note, 0)) == USE)
		df_uses_record (df, &XEXP (XEXP (note, 0), 0), DF_REF_REG_USE,
				bb, insn, 0);
	    }

	  /* The stack ptr is used (honorarily) by a CALL insn.  */
	  x = df_reg_use_gen (STACK_POINTER_REGNUM);
	  df_uses_record (df, &XEXP (x, 0), DF_REF_REG_USE, bb, insn, 0);

	  if (df->flags & DF_HARD_REGS)
	    {
	      /* Calls may also reference any of the global registers,
		 so they are recorded as used.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  {
		    x = df_reg_use_gen (i);
		    df_uses_record (df, &SET_DEST (x),
				    DF_REF_REG_USE, bb, insn, 0);
		  }
	    }
	}

      /* Record the register uses.  */
      df_uses_record (df, &PATTERN (insn),
		      DF_REF_REG_USE, bb, insn, 0);


      if (GET_CODE (insn) == CALL_INSN)
	{
	  rtx note;

	  if (df->flags & DF_HARD_REGS)
	    {
	      /* Kill all registers invalidated by a call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
		  {
		    rtx reg_clob = df_reg_clobber_gen (i);
		    df_defs_record (df, reg_clob, bb, insn);
		  }
	    }

	  /* There may be extra registers to be clobbered.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	      df_defs_record (df, XEXP (note, 0), bb, insn);
	}
    }
}


/* Record all the refs within the basic block BB.  */
static void
df_bb_refs_record (df, bb)
     struct df *df;
     basic_block bb;
{
  rtx insn;

  /* Scan the block an insn at a time from beginning to end.  */
  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* Record defs within INSN.  */
	  df_insn_refs_record (df, bb, insn);
	}
      if (insn == bb->end)
	break;
    }
}


/* Record all the refs in the basic blocks specified by BLOCKS.  */
static void
df_refs_record (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_refs_record (df, bb);
    });
}

/* Dataflow analysis routines.  */


/* Create reg-def chains for basic block BB.  These are a list of
   definitions for each register.  */
static void
df_bb_reg_def_chain_create (df, bb)
     struct df *df;
     basic_block bb;
{
  rtx insn;

  /* Perhaps the defs should be sorted using a depth first search
     of the CFG (or possibly a breadth first search).  We currently
     scan the insns in each basic block in reverse order so that the
     first defs appear at the start of the chain.  */

  for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
       insn = PREV_INSN (insn))
    {
      struct df_link *link;
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))
	continue;

      for (link = df->insns[uid].defs; link; link = link->next)
	{
	  struct ref *def = link->ref;
	  unsigned int dregno = DF_REF_REGNO (def);
          /* Don't add refs to the chain twice, i.e. only add new
             refs.  XXX The same could be done by testing whether the
             current insn is a modified (or a new) one.  This would be
             faster.  */
          if (DF_REF_ID (def) < df->def_id_save)
            continue;

	  df->regs[dregno].defs
	    = df_link_create (def, df->regs[dregno].defs);
	}
    }
}


/* Create reg-def chains for each basic block within BLOCKS.  These
   are a list of definitions for each register.  */
static void
df_reg_def_chain_create (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP/*_REV*/ (blocks, 0, bb,
    {
      df_bb_reg_def_chain_create (df, bb);
    });
}


/* Create reg-use chains for basic block BB.  These are a list of uses
   for each register.  */
static void
df_bb_reg_use_chain_create (df, bb)
     struct df *df;
     basic_block bb;
{
  rtx insn;

  /* Scan in forward order so that the last uses appear at the
     start of the chain.  */

  for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
       insn = NEXT_INSN (insn))
    {
      struct df_link *link;
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))
	continue;

      for (link = df->insns[uid].uses; link; link = link->next)
	{
	  struct ref *use = link->ref;
	  unsigned int uregno = DF_REF_REGNO (use);
          /* Don't add refs to the chain twice, i.e. only add new
             refs.  XXX The same could be done by testing whether the
             current insn is a modified (or a new) one.  This would be
             faster.  */
          if (DF_REF_ID (use) < df->use_id_save)
            continue;

	  df->regs[uregno].uses
	    = df_link_create (use, df->regs[uregno].uses);
	}
    }
}


/* Create reg-use chains for each basic block within BLOCKS.  These
   are a list of uses for each register.  */
static void
df_reg_use_chain_create (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_reg_use_chain_create (df, bb);
    });
}


/* Create def-use chains from reaching use bitmaps for basic block BB.  */
static void
df_bb_du_chain_create (df, bb, ru)
     struct df *df;
     basic_block bb;
     bitmap ru;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);
  rtx insn;

  bitmap_copy (ru, bb_info->ru_out);

  /* For each def in BB create a linked list (chain) of uses
     reached from the def.  */
  for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
       insn = PREV_INSN (insn))
    {
      struct df_link *def_link;
      struct df_link *use_link;
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))
	continue;

      /* For each def in insn...  */
      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
	{
	  struct ref *def = def_link->ref;
	  unsigned int dregno = DF_REF_REGNO (def);

	  DF_REF_CHAIN (def) = 0;

	  /* While the reg-use chains are not essential, it
	     is _much_ faster to search these short lists rather
	     than all the reaching uses, especially for large functions.  */
	  for (use_link = df->regs[dregno].uses; use_link;
	       use_link = use_link->next)
	    {
	      struct ref *use = use_link->ref;

	      if (bitmap_bit_p (ru, DF_REF_ID (use)))
		{
		  DF_REF_CHAIN (def)
		    = df_link_create (use, DF_REF_CHAIN (def));

		  bitmap_clear_bit (ru, DF_REF_ID (use));
		}
	    }
	}

      /* For each use in insn...  */
      for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
	{
	  struct ref *use = use_link->ref;
	  bitmap_set_bit (ru, DF_REF_ID (use));
	}
    }
}


/* Create def-use chains from reaching use bitmaps for basic blocks
   in BLOCKS.  */
static void
df_du_chain_create (df, blocks)
     struct df *df;
     bitmap blocks;
{
  bitmap ru;
  basic_block bb;

  ru = BITMAP_XMALLOC ();

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_du_chain_create (df, bb, ru);
    });

  BITMAP_XFREE (ru);
}


/* Create use-def chains from reaching def bitmaps for basic block BB.  */
static void
df_bb_ud_chain_create (df, bb)
     struct df *df;
     basic_block bb;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);
  struct ref **reg_def_last = df->reg_def_last;
  rtx insn;

  memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));

  /* For each use in BB create a linked list (chain) of defs
     that reach the use.  */
  for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
       insn = NEXT_INSN (insn))
    {
      unsigned int uid = INSN_UID (insn);
      struct df_link *use_link;
      struct df_link *def_link;

      if (! INSN_P (insn))
	continue;

      /* For each use in insn...  */
      for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
	{
	  struct ref *use = use_link->ref;
	  unsigned int regno = DF_REF_REGNO (use);

	  DF_REF_CHAIN (use) = 0;

	  /* Has regno been defined in this BB yet?  If so, use
	     the last def as the single entry for the use-def
	     chain for this use.  Otherwise, we need to add all
	     the defs using this regno that reach the start of
	     this BB.  */
	  if (reg_def_last[regno])
	    {
	      DF_REF_CHAIN (use)
		= df_link_create (reg_def_last[regno], 0);
	    }
	  else
	    {
	      /* While the reg-def chains are not essential, it is
		 _much_ faster to search these short lists rather than
		 all the reaching defs, especially for large
		 functions.  */
	      for (def_link = df->regs[regno].defs; def_link;
		   def_link = def_link->next)
		{
		  struct ref *def = def_link->ref;

		  if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
		    {
		      DF_REF_CHAIN (use)
			= df_link_create (def, DF_REF_CHAIN (use));
		    }
		}
	    }
	}


      /* For each def in insn...record the last def of each reg.  */
      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
	{
	  struct ref *def = def_link->ref;
	  int dregno = DF_REF_REGNO (def);

	  reg_def_last[dregno] = def;
	}
    }
}


/* Create use-def chains from reaching def bitmaps for basic blocks
   within BLOCKS.  */
static void
df_ud_chain_create (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_ud_chain_create (df, bb);
    });
}



/* Transfer function for reaching definitions:
   OUT = GEN union (IN - KILL).  */
static void
df_rd_transfer_function (bb, changed, in, out, gen, kill, data)
     int bb ATTRIBUTE_UNUSED;
     int *changed;
     bitmap in, out, gen, kill;
     void *data ATTRIBUTE_UNUSED;
{
  *changed = bitmap_union_of_diff (out, gen, in, kill);
}

/* Transfer function for upward exposed uses (a backward problem):
   IN = GEN union (OUT - KILL).  */
static void
df_ru_transfer_function (bb, changed, in, out, gen, kill, data)
     int bb ATTRIBUTE_UNUSED;
     int *changed;
     bitmap in, out, gen, kill;
     void *data ATTRIBUTE_UNUSED;
{
  *changed = bitmap_union_of_diff (in, gen, out, kill);
}

/* Transfer function for live registers (also a backward problem):
   IN = USE union (OUT - DEF).  */
static void
df_lr_transfer_function (bb, changed, in, out, use, def, data)
     int bb ATTRIBUTE_UNUSED;
     int *changed;
     bitmap in, out, use, def;
     void *data ATTRIBUTE_UNUSED;
{
  *changed = bitmap_union_of_diff (in, use, out, def);
}


/* Compute local reaching def info for basic block BB.  */
static void
df_bb_rd_local_compute (df, bb)
     struct df *df;
     basic_block bb;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);
  rtx insn;

  for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
       insn = NEXT_INSN (insn))
    {
      unsigned int uid = INSN_UID (insn);
      struct df_link *def_link;

      if (! INSN_P (insn))
	continue;

      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
	{
	  struct ref *def = def_link->ref;
	  unsigned int regno = DF_REF_REGNO (def);
	  struct df_link *def2_link;

	  for (def2_link = df->regs[regno].defs; def2_link;
	       def2_link = def2_link->next)
	    {
	      struct ref *def2 = def2_link->ref;

	      /* Add all defs of this reg to the set of kills.  This
		 is greedy since many of these defs will not actually
		 be killed by this BB but it keeps things a lot
		 simpler.  */
	      bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));

	      /* Zap from the set of gens for this BB.  */
	      bitmap_clear_bit (bb_info->rd_gen, DF_REF_ID (def2));
	    }

	  bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
	}
    }

  bb_info->rd_valid = 1;
}


/* Compute local reaching def info for each basic block within BLOCKS.  */
static void
df_rd_local_compute (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
  {
    df_bb_rd_local_compute (df, bb);
  });
}
1728 
1729 
1730 /* Compute local reaching use (upward exposed use) info for basic
1731    block BB.  */
1732 static void
df_bb_ru_local_compute(df,bb)1733 df_bb_ru_local_compute (df, bb)
1734      struct df *df;
1735      basic_block bb;
1736 {
1737   /* This is much more tricky than computing reaching defs.  With
1738      reaching defs, defs get killed by other defs.  With upwards
1739      exposed uses, these get killed by defs with the same regno.  */
1740 
1741   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1742   rtx insn;
1743 
1744 
1745   for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1746        insn = PREV_INSN (insn))
1747     {
1748       unsigned int uid = INSN_UID (insn);
1749       struct df_link *def_link;
1750       struct df_link *use_link;
1751 
1752       if (! INSN_P (insn))
1753 	continue;
1754 
1755       for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1756 	{
1757 	  struct ref *def = def_link->ref;
1758 	  unsigned int dregno = DF_REF_REGNO (def);
1759 
1760 	  for (use_link = df->regs[dregno].uses; use_link;
1761 	       use_link = use_link->next)
1762 	    {
1763 	      struct ref *use = use_link->ref;
1764 
1765 	      /* Add all uses of this reg to the set of kills.  This
1766 		 is greedy since many of these uses will not actually
1767 		 be killed by this BB but it keeps things a lot
1768 		 simpler.  */
1769 	      bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));
1770 
1771 	      /* Zap from the set of gens for this BB.  */
1772 	      bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
1773 	    }
1774 	}
1775 
1776       for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1777 	{
1778 	  struct ref *use = use_link->ref;
1779 	  /* Add use to set of gens in this BB.  */
1780 	  bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
1781 	}
1782     }
1783   bb_info->ru_valid = 1;
1784 }
1785 
1786 
1787 /* Compute local reaching use (upward exposed use) info for each basic
1788    block within BLOCKS.  */
1789 static void
1790 df_ru_local_compute (df, blocks)
1791      struct df *df;
1792      bitmap blocks;
1793 {
1794   basic_block bb;
1795 
1796   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1797   {
1798     df_bb_ru_local_compute (df, bb);
1799   });
1800 }
1801 
1802 
1803 /* Compute local live variable info for basic block BB.  */
1804 static void
1805 df_bb_lr_local_compute (df, bb)
1806      struct df *df;
1807      basic_block bb;
1808 {
1809   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1810   rtx insn;
1811 
1812   for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1813        insn = PREV_INSN (insn))
1814     {
1815       unsigned int uid = INSN_UID (insn);
1816       struct df_link *link;
1817 
1818       if (! INSN_P (insn))
1819 	continue;
1820 
1821       for (link = df->insns[uid].defs; link; link = link->next)
1822 	{
1823 	  struct ref *def = link->ref;
1824 	  unsigned int dregno = DF_REF_REGNO (def);
1825 
1826 	  /* Add def to set of defs in this BB.  */
1827 	  bitmap_set_bit (bb_info->lr_def, dregno);
1828 
1829 	  bitmap_clear_bit (bb_info->lr_use, dregno);
1830 	}
1831 
1832       for (link = df->insns[uid].uses; link; link = link->next)
1833 	{
1834 	  struct ref *use = link->ref;
1835 	  /* Add use to set of uses in this BB.  */
1836 	  bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
1837 	}
1838     }
1839   bb_info->lr_valid = 1;
1840 }
1841 
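/* Note that scanning the block backwards, with an insn's defs
   processed before its uses, gives the right answer for an insn such
   as "r7 = r7 + 1": the def records r7 in lr_def and the use then
   re-marks it in lr_use, so r7 is correctly seen as upward exposed.  */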
1842 
1843 /* Compute local live variable info for each basic block within BLOCKS.  */
1844 static void
1845 df_lr_local_compute (df, blocks)
1846      struct df *df;
1847      bitmap blocks;
1848 {
1849   basic_block bb;
1850 
1851   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1852   {
1853     df_bb_lr_local_compute (df, bb);
1854   });
1855 }
1856 
1857 
1858 /* Compute register info: lifetime, bb, and number of defs and uses
1859    for basic block BB.  */
1860 static void
1861 df_bb_reg_info_compute (df, bb, live)
1862      struct df *df;
1863      basic_block bb;
1864      bitmap live;
1865 {
1866   struct reg_info *reg_info = df->regs;
1867   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1868   rtx insn;
1869 
1870   bitmap_copy (live, bb_info->lr_out);
1871 
1872   for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1873        insn = PREV_INSN (insn))
1874     {
1875       unsigned int uid = INSN_UID (insn);
1876       unsigned int regno;
1877       struct df_link *link;
1878 
1879       if (! INSN_P (insn))
1880 	continue;
1881 
1882       for (link = df->insns[uid].defs; link; link = link->next)
1883 	{
1884 	  struct ref *def = link->ref;
1885 	  unsigned int dregno = DF_REF_REGNO (def);
1886 
1887 	  /* Kill this register.  */
1888 	  bitmap_clear_bit (live, dregno);
1889 	  reg_info[dregno].n_defs++;
1890 	}
1891 
1892       for (link = df->insns[uid].uses; link; link = link->next)
1893 	{
1894 	  struct ref *use = link->ref;
1895 	  unsigned int uregno = DF_REF_REGNO (use);
1896 
1897 	  /* This register is now live.  */
1898 	  bitmap_set_bit (live, uregno);
1899 	  reg_info[uregno].n_uses++;
1900 	}
1901 
1902       /* Increment lifetimes of all live registers.  */
1903       EXECUTE_IF_SET_IN_BITMAP (live, 0, regno,
1904       {
1905 	reg_info[regno].lifetime++;
1906       });
1907     }
1908 }
1909 
1910 
1911 /* Compute register info: lifetime, bb, and number of defs and uses.  */
1912 static void
1913 df_reg_info_compute (df, blocks)
1914      struct df *df;
1915      bitmap blocks;
1916 {
1917   basic_block bb;
1918   bitmap live;
1919 
1920   live = BITMAP_XMALLOC ();
1921 
1922   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1923   {
1924     df_bb_reg_info_compute (df, bb, live);
1925   });
1926 
1927   BITMAP_XFREE (live);
1928 }
1929 
1930 
1931 /* Assign LUIDs for BB.  */
1932 static int
1933 df_bb_luids_set (df, bb)
1934      struct df *df;
1935      basic_block bb;
1936 {
1937   rtx insn;
1938   int luid = 0;
1939 
1940   /* The LUIDs are monotonically increasing for each basic block.  */
1941 
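  /* A real insn is given the incremented LUID; notes and labels
     inherit the LUID of the nearest preceding real insn (0 at the
     start of the block).  */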
1942   for (insn = bb->head; ; insn = NEXT_INSN (insn))
1943     {
1944       if (INSN_P (insn))
1945 	DF_INSN_LUID (df, insn) = luid++;
1946       DF_INSN_LUID (df, insn) = luid;
1947 
1948       if (insn == bb->end)
1949 	break;
1950     }
1951   return luid;
1952 }
1953 
1954 
1955 /* Assign LUIDs for each basic block within BLOCKS.  */
1956 static int
1957 df_luids_set (df, blocks)
1958      struct df *df;
1959      bitmap blocks;
1960 {
1961   basic_block bb;
1962   int total = 0;
1963 
1964   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1965     {
1966       total += df_bb_luids_set (df, bb);
1967     });
1968   return total;
1969 }
1970 
1971 /* Perform dataflow analysis using existing DF structure for blocks
1972    within BLOCKS.  If BLOCKS is zero, use all basic blocks in the CFG.  */
1973 static void
1974 df_analyse_1 (df, blocks, flags, update)
1975      struct df *df;
1976      bitmap blocks;
1977      int flags;
1978      int update;
1979 {
1980   int aflags;
1981   int dflags;
1982   int i;
1983   basic_block bb;
1984 
1985   dflags = 0;
1986   aflags = flags;
1987   if (flags & DF_UD_CHAIN)
1988     aflags |= DF_RD | DF_RD_CHAIN;
1989 
1990   if (flags & DF_DU_CHAIN)
1991     aflags |= DF_RU;
1992 
1993   if (flags & DF_RU)
1994     aflags |= DF_RU_CHAIN;
1995 
1996   if (flags & DF_REG_INFO)
1997     aflags |= DF_LR;
1998 
1999   if (! blocks)
2000     blocks = df->all_blocks;
2001 
2002   df->flags = flags;
2003   if (update)
2004     {
2005       df_refs_update (df);
2006       /* More fine grained incremental dataflow analysis would be
2007 	 nice.  For now recompute the whole shebang for the
2008 	 modified blocks.  */
2009 #if 0
2010       df_refs_unlink (df, blocks);
2011 #endif
2012       /* All the def-use, use-def chains can be potentially
2013 	 modified by changes in one block.  The size of the
2014 	 bitmaps can also change.  */
2015     }
2016   else
2017     {
2018       /* Scan the function for all register defs and uses.  */
2019       df_refs_queue (df);
2020       df_refs_record (df, blocks);
2021 
2022       /* Link all the new defs and uses to the insns.  */
2023       df_refs_process (df);
2024     }
2025 
2026   /* Allocate the bitmaps now that the total number of defs and uses
2027      is known.  If the number of defs or uses has changed, then
2028      these bitmaps need to be reallocated.  */
2029   df_bitmaps_alloc (df, aflags);
2030 
2031   /* Set the LUIDs for each specified basic block.  */
2032   df_luids_set (df, blocks);
2033 
2034   /* Recreate reg-def and reg-use chains from scratch so that first
2035      def is at the head of the reg-def chain and the last use is at
2036      the head of the reg-use chain.  This is only important for
2037      regs local to a basic block as it speeds up searching.  */
2038   if (aflags & DF_RD_CHAIN)
2039     {
2040       df_reg_def_chain_create (df, blocks);
2041     }
2042 
2043   if (aflags & DF_RU_CHAIN)
2044     {
2045       df_reg_use_chain_create (df, blocks);
2046     }
2047 
2048   df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
2049   df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
2050   df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2051   df->inverse_dfs_map = xmalloc (sizeof (int) * last_basic_block);
2052   df->inverse_rc_map = xmalloc (sizeof (int) * last_basic_block);
2053   df->inverse_rts_map = xmalloc (sizeof (int) * last_basic_block);
2054 
2055   flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2056   flow_reverse_top_sort_order_compute (df->rts_order);
2057   for (i = 0; i < n_basic_blocks; i++)
2058     {
2059       df->inverse_dfs_map[df->dfs_order[i]] = i;
2060       df->inverse_rc_map[df->rc_order[i]] = i;
2061       df->inverse_rts_map[df->rts_order[i]] = i;
2062     }
2063   if (aflags & DF_RD)
2064     {
2065       /* Compute the sets of gens and kills for the defs of each bb.  */
2066       df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
2067       {
2068 	bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2069 	bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2070 	bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
2071 	bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
2072 	FOR_EACH_BB (bb)
2073 	  {
2074 	    in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
2075 	    out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
2076 	    gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
2077 	    kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
2078 	  }
2079 	iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
2080 				   FORWARD, UNION, df_rd_transfer_function,
2081 				   df->inverse_rc_map, NULL);
2082 	free (in);
2083 	free (out);
2084 	free (gen);
2085 	free (kill);
2086       }
2087     }
2088 
2089   if (aflags & DF_UD_CHAIN)
2090     {
2091       /* Create use-def chains.  */
2092       df_ud_chain_create (df, df->all_blocks);
2093 
2094       if (! (flags & DF_RD))
2095 	dflags |= DF_RD;
2096     }
2097 
2098   if (aflags & DF_RU)
2099     {
2100       /* Compute the sets of gens and kills for the upwards exposed
2101 	 uses in each bb.  */
2102       df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
2103       {
2104 	bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2105 	bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2106 	bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
2107 	bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
2108 	FOR_EACH_BB (bb)
2109 	  {
2110 	    in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
2111 	    out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
2112 	    gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
2113 	    kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
2114 	  }
2115 	iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
2116 				   BACKWARD, UNION, df_ru_transfer_function,
2117 				   df->inverse_rts_map, NULL);
2118 	free (in);
2119 	free (out);
2120 	free (gen);
2121 	free (kill);
2122       }
2123     }
2124 
2125   if (aflags & DF_DU_CHAIN)
2126     {
2127       /* Create def-use chains.  */
2128       df_du_chain_create (df, df->all_blocks);
2129 
2130       if (! (flags & DF_RU))
2131 	dflags |= DF_RU;
2132     }
2133 
2134   /* Free up bitmaps that are no longer required.  */
2135   if (dflags)
2136     df_bitmaps_free (df, dflags);
2137 
2138   if (aflags & DF_LR)
2139     {
2140       /* Compute the sets of defs and uses of live variables.  */
2141       df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
2142       {
2143 	bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2144 	bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2145 	bitmap *use = xmalloc (sizeof (bitmap) * last_basic_block);
2146 	bitmap *def = xmalloc (sizeof (bitmap) * last_basic_block);
2147 	FOR_EACH_BB (bb)
2148 	  {
2149 	    in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
2150 	    out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
2151 	    use[bb->index] = DF_BB_INFO (df, bb)->lr_use;
2152 	    def[bb->index] = DF_BB_INFO (df, bb)->lr_def;
2153 	  }
2154 	iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
2155 				   BACKWARD, UNION, df_lr_transfer_function,
2156 				   df->inverse_rts_map, NULL);
2157 	free (in);
2158 	free (out);
2159 	free (use);
2160 	free (def);
2161       }
2162     }
2163 
2164   if (aflags & DF_REG_INFO)
2165     {
2166       df_reg_info_compute (df, df->all_blocks);
2167     }
2168   free (df->dfs_order);
2169   free (df->rc_order);
2170   free (df->rts_order);
2171   free (df->inverse_rc_map);
2172   free (df->inverse_dfs_map);
2173   free (df->inverse_rts_map);
2174 }
2175 
2176 
2177 /* Initialize dataflow analysis.  */
2178 struct df *
2179 df_init ()
2180 {
2181   struct df *df;
2182 
2183   df = xcalloc (1, sizeof (struct df));
2184 
2185   /* Squirrel away a global for debugging.  */
2186   ddf = df;
2187 
2188   return df;
2189 }
2190 
2191 
2192 /* Start queuing refs.  */
2193 static int
2194 df_refs_queue (df)
2195      struct df *df;
2196 {
2197   df->def_id_save = df->def_id;
2198   df->use_id_save = df->use_id;
2199   /* ???? Perhaps we should save current obstack state so that we can
2200      unwind it.  */
2201   return 0;
2202 }
2203 
2204 
2205 /* Process queued refs.  */
2206 static int
2207 df_refs_process (df)
2208      struct df *df;
2209 {
2210   unsigned int i;
2211 
2212   /* Build new insn-def chains.  */
2213   for (i = df->def_id_save; i != df->def_id; i++)
2214     {
2215       struct ref *def = df->defs[i];
2216       unsigned int uid = DF_REF_INSN_UID (def);
2217 
2218       /* Add def to head of def list for INSN.  */
2219       df->insns[uid].defs
2220 	= df_link_create (def, df->insns[uid].defs);
2221     }
2222 
2223   /* Build new insn-use chains.  */
2224   for (i = df->use_id_save; i != df->use_id; i++)
2225     {
2226       struct ref *use = df->uses[i];
2227       unsigned int uid = DF_REF_INSN_UID (use);
2228 
2229       /* Add use to head of use list for INSN.  */
2230       df->insns[uid].uses
2231 	= df_link_create (use, df->insns[uid].uses);
2232     }
2233   return 0;
2234 }
2235 
2236 
2237 /* Update refs for basic block BB.  */
2238 static int
2239 df_bb_refs_update (df, bb)
2240      struct df *df;
2241      basic_block bb;
2242 {
2243   rtx insn;
2244   int count = 0;
2245 
2246   /* While we have to scan the chain of insns for this BB, we don't
2247      need to allocate and queue a long chain of BB/INSN pairs.  Using
2248      a bitmap for insns_modified saves memory and avoids queuing
2249      duplicates.  */
2250 
2251   for (insn = bb->head; ; insn = NEXT_INSN (insn))
2252     {
2253       unsigned int uid;
2254 
2255       uid = INSN_UID (insn);
2256 
2257       if (bitmap_bit_p (df->insns_modified, uid))
2258 	{
2259 	  /* Delete any allocated refs of this insn.  MPH,  FIXME.  */
2260 	  df_insn_refs_unlink (df, bb, insn);
2261 
2262 	  /* Scan the insn for refs.  */
2263 	  df_insn_refs_record (df, bb, insn);
2264 
2265 	  count++;
2266 	}
2267       if (insn == bb->end)
2268 	break;
2269     }
2270   return count;
2271 }
2272 
2273 
2274 /* Process all the modified/deleted insns that were queued.  */
2275 static int
2276 df_refs_update (df)
2277      struct df *df;
2278 {
2279   basic_block bb;
2280   int count = 0;
2281 
2282   if ((unsigned int) max_reg_num () >= df->reg_size)
2283     df_reg_table_realloc (df, 0);
2284 
2285   df_refs_queue (df);
2286 
2287   FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
2288     {
2289       count += df_bb_refs_update (df, bb);
2290     });
2291 
2292   df_refs_process (df);
2293   return count;
2294 }
2295 
2296 
2297 /* Return nonzero if any of the requested blocks in the bitmap
2298    BLOCKS have been modified.  */
2299 static int
2300 df_modified_p (df, blocks)
2301      struct df *df;
2302      bitmap blocks;
2303 {
2304   int update = 0;
2305   basic_block bb;
2306 
2307   if (!df->n_bbs)
2308     return 0;
2309 
2310   FOR_EACH_BB (bb)
2311     if (bitmap_bit_p (df->bbs_modified, bb->index)
2312 	&& (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->index)))
2313     {
2314       update = 1;
2315       break;
2316     }
2317 
2318   return update;
2319 }
2320 
2321 
2322 /* Analyse dataflow info for the basic blocks specified by the bitmap
2323    BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2324    modified blocks if BLOCKS is -1.  */
2325 int
2326 df_analyse (df, blocks, flags)
2327      struct df *df;
2328      bitmap blocks;
2329      int flags;
2330 {
2331   int update;
2332 
2333   /* We could deal with additional basic blocks being created by
2334      rescanning everything again.  */
2335   if (df->n_bbs && df->n_bbs != (unsigned int) last_basic_block)
2336     abort ();
2337 
2338   update = df_modified_p (df, blocks);
2339   if (update || (flags != df->flags))
2340     {
2341       if (! blocks)
2342 	{
2343 	  if (df->n_bbs)
2344 	    {
2345 	      /* Recompute everything from scratch.  */
2346 	      df_free (df);
2347 	    }
2348 	  /* Allocate and initialize data structures.  */
2349 	  df_alloc (df, max_reg_num ());
2350 	  df_analyse_1 (df, 0, flags, 0);
2351 	  update = 1;
2352 	}
2353       else
2354 	{
2355 	  if (blocks == (bitmap) -1)
2356 	    blocks = df->bbs_modified;
2357 
2358 	  if (! df->n_bbs)
2359 	    abort ();
2360 
2361 	  df_analyse_1 (df, blocks, flags, 1);
2362 	  bitmap_zero (df->bbs_modified);
2363 	  bitmap_zero (df->insns_modified);
2364 	}
2365     }
2366   return update;
2367 }
2368 
2369 
2370 /* Free all the dataflow info and the DF structure.  */
2371 void
2372 df_finish (df)
2373      struct df *df;
2374 {
2375   df_free (df);
2376   free (df);
2377 }
2378 
2379 
2380 /* Unlink INSN from its reference information.  */
2381 static void
2382 df_insn_refs_unlink (df, bb, insn)
2383      struct df *df;
2384      basic_block bb ATTRIBUTE_UNUSED;
2385      rtx insn;
2386 {
2387   struct df_link *link;
2388   unsigned int uid;
2389 
2390   uid = INSN_UID (insn);
2391 
2392   /* Unlink all refs defined by this insn.  */
2393   for (link = df->insns[uid].defs; link; link = link->next)
2394     df_def_unlink (df, link->ref);
2395 
2396   /* Unlink all refs used by this insn.  */
2397   for (link = df->insns[uid].uses; link; link = link->next)
2398     df_use_unlink (df, link->ref);
2399 
2400   df->insns[uid].defs = 0;
2401   df->insns[uid].uses = 0;
2402 }
2403 
2404 
2405 #if 0
2406 /* Unlink all the insns within BB from their reference information.  */
2407 static void
2408 df_bb_refs_unlink (df, bb)
2409      struct df *df;
2410      basic_block bb;
2411 {
2412   rtx insn;
2413 
2414   /* Scan the block an insn at a time from beginning to end.  */
2415   for (insn = bb->head; ; insn = NEXT_INSN (insn))
2416     {
2417       if (INSN_P (insn))
2418 	{
2419 	  /* Unlink refs for INSN.  */
2420 	  df_insn_refs_unlink (df, bb, insn);
2421 	}
2422       if (insn == bb->end)
2423 	break;
2424     }
2425 }
2426 
2427 
2428 /* Unlink all the refs in the basic blocks specified by BLOCKS.
2429    Not currently used.  */
2430 static void
2431 df_refs_unlink (df, blocks)
2432      struct df *df;
2433      bitmap blocks;
2434 {
2435   basic_block bb;
2436 
2437   if (blocks)
2438     {
2439       FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2440       {
2441 	df_bb_refs_unlink (df, bb);
2442       });
2443     }
2444   else
2445     {
2446       FOR_EACH_BB (bb)
2447 	df_bb_refs_unlink (df, bb);
2448     }
2449 }
2450 #endif
2451 
2452 /* Functions to modify insns.  */
2453 
2454 
2455 /* Delete INSN and all its reference information.  */
2456 rtx
2457 df_insn_delete (df, bb, insn)
2458      struct df *df;
2459      basic_block bb ATTRIBUTE_UNUSED;
2460      rtx insn;
2461 {
2462   /* If the insn is a jump, we should perhaps call delete_insn to
2463      handle the JUMP_LABEL?  */
2464 
2465   /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label.  */
2466   if (insn == bb->head)
2467     abort ();
2468 
2469   /* Delete the insn.  */
2470   delete_insn (insn);
2471 
2472   df_insn_modify (df, bb, insn);
2473 
2474   return NEXT_INSN (insn);
2475 }
2476 
2477 
2478 /* Mark that INSN within BB may have changed  (created/modified/deleted).
2479    This may be called multiple times for the same insn.  There is no
2480    harm calling this function if the insn wasn't changed; it will just
2481    slow down the rescanning of refs.  */
2482 void
2483 df_insn_modify (df, bb, insn)
2484      struct df *df;
2485      basic_block bb;
2486      rtx insn;
2487 {
2488   unsigned int uid;
2489 
2490   uid = INSN_UID (insn);
2491   if (uid >= df->insn_size)
2492     df_insn_table_realloc (df, uid);
2493 
2494   bitmap_set_bit (df->bbs_modified, bb->index);
2495   bitmap_set_bit (df->insns_modified, uid);
2496 
2497   /* For incremental updating on the fly, perhaps we could make a copy
2498      of all the refs of the original insn and turn them into
2499      anti-refs.  When df_refs_update finds these anti-refs, it annihilates
2500      the original refs.  If validate_change fails then these anti-refs
2501      will just get ignored.  */
2502 }
2503 
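/* A sketch of the typical calling sequence when a pass transforms an
   insn in place (using the standard validate_change protocol):

      validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
      if (apply_change_group ())
        df_insn_modify (df, bb, insn);

   where NEW_SRC is whatever replacement rtx the pass has built.  The
   queued insns are rescanned by the next call to df_analyse.  */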
2504 
2505 typedef struct replace_args {
2506   rtx match;
2507   rtx replacement;
2508   rtx insn;
2509   int modified;
2510 } replace_args;
2511 
2512 
2513 /* Replace mem pointed to by PX with its associated pseudo register.
2514    DATA is actually a pointer to a structure describing the
2515    instruction currently being scanned and the MEM we are currently
2516    replacing.  */
2517 static int
2518 df_rtx_mem_replace (px, data)
2519      rtx *px;
2520      void *data;
2521 {
2522   replace_args *args = (replace_args *) data;
2523   rtx mem = *px;
2524 
2525   if (mem == NULL_RTX)
2526     return 0;
2527 
2528   switch (GET_CODE (mem))
2529     {
2530     case MEM:
2531       break;
2532 
2533     case CONST_DOUBLE:
2534       /* We're not interested in the MEM associated with a
2535 	 CONST_DOUBLE, so there's no need to traverse into one.  */
2536       return -1;
2537 
2538     default:
2539       /* This is not a MEM.  */
2540       return 0;
2541     }
2542 
2543   if (!rtx_equal_p (args->match, mem))
2544     /* This is not the MEM we are currently replacing.  */
2545     return 0;
2546 
2547   /* Actually replace the MEM.  */
2548   validate_change (args->insn, px, args->replacement, 1);
2549   args->modified++;
2550 
2551   return 0;
2552 }
2553 
2554 
2555 int
2556 df_insn_mem_replace (df, bb, insn, mem, reg)
2557      struct df *df;
2558      basic_block bb;
2559      rtx insn;
2560      rtx mem;
2561      rtx reg;
2562 {
2563   replace_args args;
2564 
2565   args.insn = insn;
2566   args.match = mem;
2567   args.replacement = reg;
2568   args.modified = 0;
2569 
2570   /* Search and replace all matching mems within insn.  */
2571   for_each_rtx (&insn, df_rtx_mem_replace, &args);
2572 
2573   if (args.modified)
2574     df_insn_modify (df, bb, insn);
2575 
2576   /* ???? FIXME.  We may have a new def or one or more new uses of REG
2577      in INSN.  REG should be a new pseudo so it won't affect the
2578      dataflow information that we currently have.  We should add
2579      the new uses and defs to INSN and then recreate the chains
2580      when df_analyse is called.  */
2581   return args.modified;
2582 }
2583 
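/* For example, a pass that has loaded MEM into a fresh pseudo REG
   might rewrite the remaining occurrences with (a sketch):

      if (df_insn_mem_replace (df, bb, insn, mem, reg))
        changed |= apply_change_group ();

   noting that df_rtx_mem_replace only queues each replacement via
   validate_change (..., 1); apply_change_group commits them.  */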
2584 
2585 /* Replace one register with another.  Called through for_each_rtx; PX
2586    points to the rtx being scanned.  DATA is actually a pointer to a
2587    structure of arguments.  */
2588 static int
2589 df_rtx_reg_replace (px, data)
2590      rtx *px;
2591      void *data;
2592 {
2593   rtx x = *px;
2594   replace_args *args = (replace_args *) data;
2595 
2596   if (x == NULL_RTX)
2597     return 0;
2598 
2599   if (x == args->match)
2600     {
2601       validate_change (args->insn, px, args->replacement, 1);
2602       args->modified++;
2603     }
2604 
2605   return 0;
2606 }
2607 
2608 
2609 /* Replace the reg within every ref on CHAIN that is within the set
2610    BLOCKS of basic blocks with NEWREG.  Also update the regs within
2611    REG_NOTES.  */
2612 void
2613 df_refs_reg_replace (df, blocks, chain, oldreg, newreg)
2614      struct df *df;
2615      bitmap blocks;
2616      struct df_link *chain;
2617      rtx oldreg;
2618      rtx newreg;
2619 {
2620   struct df_link *link;
2621   replace_args args;
2622 
2623   if (! blocks)
2624     blocks = df->all_blocks;
2625 
2626   args.match = oldreg;
2627   args.replacement = newreg;
2628   args.modified = 0;
2629 
2630   for (link = chain; link; link = link->next)
2631     {
2632       struct ref *ref = link->ref;
2633       rtx insn = DF_REF_INSN (ref);
2634 
2635       if (! INSN_P (insn))
2636 	continue;
2637 
2638       if (bitmap_bit_p (blocks, DF_REF_BBNO (ref)))
2639 	{
2640 	  df_ref_reg_replace (df, ref, oldreg, newreg);
2641 
2642 	  /* Replace occurrences of the reg within the REG_NOTES.  */
2643 	  if ((! link->next || DF_REF_INSN (ref)
2644 	      != DF_REF_INSN (link->next->ref))
2645 	      && REG_NOTES (insn))
2646 	    {
2647 	      args.insn = insn;
2648 	      for_each_rtx (&REG_NOTES (insn), df_rtx_reg_replace, &args);
2649 	    }
2650 	}
2651       else
2652 	{
2653 	  /* Temporary check to ensure that we have a grip on which
2654 	     regs should be replaced.  */
2655 	  abort ();
2656 	}
2657     }
2658 }
2659 
2660 
2661 /* Replace all occurrences of register OLDREG with register NEWREG in
2662    blocks defined by bitmap BLOCKS.  This also replaces occurrences of
2663    OLDREG in the REG_NOTES but only for insns containing OLDREG.  This
2664    routine expects the reg-use and reg-def chains to be valid.  */
2665 int
2666 df_reg_replace (df, blocks, oldreg, newreg)
2667      struct df *df;
2668      bitmap blocks;
2669      rtx oldreg;
2670      rtx newreg;
2671 {
2672   unsigned int oldregno = REGNO (oldreg);
2673 
2674   df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
2675   df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
2676   return 1;
2677 }
2678 
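/* For example, renaming pseudo OLD_REG to a fresh pseudo within the
   blocks of a loop might look like (a sketch, with LOOP_BLOCKS a
   bitmap of the loop's block indices):

      rtx new_reg = gen_reg_rtx (GET_MODE (old_reg));
      df_reg_replace (df, loop_blocks, old_reg, new_reg);
      if (! apply_change_group ())
        abort ();
 */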
2679 
2680 /* Try replacing the reg within REF with NEWREG.  Do not modify
2681    def-use/use-def chains.  */
2682 int
2683 df_ref_reg_replace (df, ref, oldreg, newreg)
2684      struct df *df;
2685      struct ref *ref;
2686      rtx oldreg;
2687      rtx newreg;
2688 {
2689   /* Check whether the insn has been deleted by being converted into
2690      a NOTE; if so, ignore it.  */
2691   if (! INSN_P (DF_REF_INSN (ref)))
2692     return 0;
2693 
2694   if (oldreg && oldreg != DF_REF_REG (ref))
2695     abort ();
2696 
2697   if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
2698     return 0;
2699 
2700   df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
2701   return 1;
2702 }
2703 
2704 
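/* Remove the last use of REGNO within USE_INSN and move the first def
   of REGNO within DEF_INSN from DEF_INSN's def list to USE_INSN's.
   Return the moved def, or zero if either ref cannot be found.  */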
2705 struct ref*
2706 df_bb_def_use_swap (df, bb, def_insn, use_insn, regno)
2707      struct df * df;
2708      basic_block bb;
2709      rtx def_insn;
2710      rtx use_insn;
2711      unsigned int regno;
2712 {
2713   struct ref *def;
2714   struct ref *use;
2715   int def_uid;
2716   int use_uid;
2717   struct df_link *link;
2718 
2719   def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
2720   if (! def)
2721     return 0;
2722 
2723   use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
2724   if (! use)
2725     return 0;
2726 
2727   /* The USE no longer exists.  */
2728   use_uid = INSN_UID (use_insn);
2729   df_use_unlink (df, use);
2730   df_ref_unlink (&df->insns[use_uid].uses, use);
2731 
2732   /* The DEF requires shifting so remove it from DEF_INSN
2733      and add it to USE_INSN by reusing LINK.  */
2734   def_uid = INSN_UID (def_insn);
2735   link = df_ref_unlink (&df->insns[def_uid].defs, def);
2736   link->ref = def;
2737   link->next = df->insns[use_uid].defs;
2738   df->insns[use_uid].defs = link;
2739 
2740 #if 0
2741   link = df_ref_unlink (&df->regs[regno].defs, def);
2742   link->ref = def;
2743   link->next = df->regs[regno].defs;
2744   df->regs[regno].defs = link;
2745 #endif
2746 
2747   DF_REF_INSN (def) = use_insn;
2748   return def;
2749 }
2750 
2751 
2752 /* Mark the insns from FIRST_INSN to LAST_INSN inclusive as modified.
2753    All new insns must be processed by this routine.  */
2754 static void
2755 df_insns_modify (df, bb, first_insn, last_insn)
2756      struct df *df;
2757      basic_block bb;
2758      rtx first_insn;
2759      rtx last_insn;
2760 {
2761   rtx insn;
2762 
2763   for (insn = first_insn; ; insn = NEXT_INSN (insn))
2764     {
2765       unsigned int uid;
2766 
2767       /* A non-const call should not have slipped through the net.  If
2768 	 it does, we need to create a new basic block.  Ouch.  The
2769 	 same applies for a label.  */
2770       if ((GET_CODE (insn) == CALL_INSN
2771 	   && ! CONST_OR_PURE_CALL_P (insn))
2772 	  || GET_CODE (insn) == CODE_LABEL)
2773 	abort ();
2774 
2775       uid = INSN_UID (insn);
2776 
2777       if (uid >= df->insn_size)
2778 	df_insn_table_realloc (df, uid);
2779 
2780       df_insn_modify (df, bb, insn);
2781 
2782       if (insn == last_insn)
2783 	break;
2784     }
2785 }
2786 
2787 
2788 /* Emit PATTERN before INSN within BB.  */
2789 rtx
2790 df_pattern_emit_before (df, pattern, bb, insn)
2791      struct df *df ATTRIBUTE_UNUSED;
2792      rtx pattern;
2793      basic_block bb;
2794      rtx insn;
2795 {
2796   rtx ret_insn;
2797   rtx prev_insn = PREV_INSN (insn);
2798 
2799   /* We should not be inserting before the start of the block.  */
2800   if (insn == bb->head)
2801     abort ();
2802   ret_insn = emit_insn_before (pattern, insn);
2803   if (ret_insn == insn)
2804     return ret_insn;
2805 
2806   df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
2807   return ret_insn;
2808 }
2809 
2810 
2811 /* Emit PATTERN after INSN within BB.  */
2812 rtx
2813 df_pattern_emit_after (df, pattern, bb, insn)
2814      struct df *df;
2815      rtx pattern;
2816      basic_block bb;
2817      rtx insn;
2818 {
2819   rtx ret_insn;
2820 
2821   ret_insn = emit_insn_after (pattern, insn);
2822   if (ret_insn == insn)
2823     return ret_insn;
2824 
2825   df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2826   return ret_insn;
2827 }
2828 
2829 
2830 /* Emit jump PATTERN after INSN within BB.  */
2831 rtx
2832 df_jump_pattern_emit_after (df, pattern, bb, insn)
2833      struct df *df;
2834      rtx pattern;
2835      basic_block bb;
2836      rtx insn;
2837 {
2838   rtx ret_insn;
2839 
2840   ret_insn = emit_jump_insn_after (pattern, insn);
2841   if (ret_insn == insn)
2842     return ret_insn;
2843 
2844   df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2845   return ret_insn;
2846 }
2847 
2848 
2849 /* Move INSN within BB before BEFORE_INSN within BEFORE_BB.
2850 
2851    This function should only be used to move loop invariant insns
2852    out of a loop where it has been proven that the def-use info
2853    will still be valid.  */
2854 rtx
2855 df_insn_move_before (df, bb, insn, before_bb, before_insn)
2856      struct df *df;
2857      basic_block bb;
2858      rtx insn;
2859      basic_block before_bb;
2860      rtx before_insn;
2861 {
2862   struct df_link *link;
2863   unsigned int uid;
2864 
2865   if (! bb)
2866     return df_pattern_emit_before (df, insn, before_bb, before_insn);
2867 
2868   uid = INSN_UID (insn);
2869 
2870   /* Change bb for all df defined and used by this insn.  */
2871   for (link = df->insns[uid].defs; link; link = link->next)
2872     DF_REF_BB (link->ref) = before_bb;
2873   for (link = df->insns[uid].uses; link; link = link->next)
2874     DF_REF_BB (link->ref) = before_bb;
2875 
2876   /* The lifetimes of the registers used in this insn will be reduced
2877      while the lifetimes of the registers defined in this insn
2878      are likely to be increased.  */
2879 
2880   /* ???? Perhaps all the insns moved should be stored on a list
2881      which df_analyse removes when it recalculates data flow.  */
2882 
2883   return emit_insn_before (insn, before_insn);
2884 }
2885 
2886 /* Functions to query dataflow information.  */
2887 
2888 
2889 int
2890 df_insn_regno_def_p (df, bb, insn, regno)
2891      struct df *df;
2892      basic_block bb ATTRIBUTE_UNUSED;
2893      rtx insn;
2894      unsigned int regno;
2895 {
2896   unsigned int uid;
2897   struct df_link *link;
2898 
2899   uid = INSN_UID (insn);
2900 
2901   for (link = df->insns[uid].defs; link; link = link->next)
2902     {
2903       struct ref *def = link->ref;
2904 
2905       if (DF_REF_REGNO (def) == regno)
2906 	return 1;
2907     }
2908 
2909   return 0;
2910 }
2911 
2912 
2913 static int
2914 df_def_dominates_all_uses_p (df, def)
2915      struct df *df ATTRIBUTE_UNUSED;
2916      struct ref *def;
2917 {
2918   struct df_link *du_link;
2919 
2920   /* Follow def-use chain to find all the uses of this def.  */
2921   for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
2922     {
2923       struct ref *use = du_link->ref;
2924       struct df_link *ud_link;
2925 
2926       /* Follow use-def chain to check all the defs for this use.  */
2927       for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
2928 	if (ud_link->ref != def)
2929 	  return 0;
2930     }
2931   return 1;
2932 }
2933 
2934 
2935 int
2936 df_insn_dominates_all_uses_p (df, bb, insn)
2937      struct df *df;
2938      basic_block bb ATTRIBUTE_UNUSED;
2939      rtx insn;
2940 {
2941   unsigned int uid;
2942   struct df_link *link;
2943 
2944   uid = INSN_UID (insn);
2945 
2946   for (link = df->insns[uid].defs; link; link = link->next)
2947     {
2948       struct ref *def = link->ref;
2949 
2950       if (! df_def_dominates_all_uses_p (df, def))
2951 	return 0;
2952     }
2953 
2954   return 1;
2955 }
2956 
2957 
2958 /* Return nonzero if DEF dominates all its uses within the bitmap
2959    BLOCKS.  */
2960 static int
2961 df_def_dominates_uses_p (df, def, blocks)
2962      struct df *df ATTRIBUTE_UNUSED;
2963      struct ref *def;
2964      bitmap blocks;
2965 {
2966   struct df_link *du_link;
2967 
2968   /* Follow def-use chain to find all the uses of this def.  */
2969   for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
2970     {
2971       struct ref *use = du_link->ref;
2972       struct df_link *ud_link;
2973 
2974       /* Only worry about the uses within BLOCKS.  For example,
2975          consider a register defined within a loop that is live at the
2976          loop exits.  */
2977       if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
2978 	{
2979 	  /* Follow use-def chain to check all the defs for this use.  */
2980 	  for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
2981 	    if (ud_link->ref != def)
2982 	      return 0;
2983 	}
2984     }
2985   return 1;
2986 }
2987 
2988 
2989 /* Return nonzero if all the defs of INSN within BB dominates
2990    all the corresponding uses.  */
2991 int
2992 df_insn_dominates_uses_p (df, bb, insn, blocks)
2993      struct df *df;
2994      basic_block bb ATTRIBUTE_UNUSED;
2995      rtx insn;
2996      bitmap blocks;
2997 {
2998   unsigned int uid;
2999   struct df_link *link;
3000 
3001   uid = INSN_UID (insn);
3002 
3003   for (link = df->insns[uid].defs; link; link = link->next)
3004     {
3005       struct ref *def = link->ref;
3006 
3007       /* Only consider the defs within BLOCKS.  */
3008       if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
3009 	  && ! df_def_dominates_uses_p (df, def, blocks))
3010 	return 0;
3011     }
3012   return 1;
3013 }
3014 
3015 
3016 /* Return the basic block that REG is referenced in, or NULL if it is
3017    referenced in multiple basic blocks.  */
3018 basic_block
3019 df_regno_bb (df, regno)
3020      struct df *df;
3021      unsigned int regno;
3022 {
3023   struct df_link *defs = df->regs[regno].defs;
3024   struct df_link *uses = df->regs[regno].uses;
3025   struct ref *def = defs ? defs->ref : 0;
3026   struct ref *use = uses ? uses->ref : 0;
3027   basic_block bb_def = def ? DF_REF_BB (def) : 0;
3028   basic_block bb_use = use ? DF_REF_BB (use) : 0;
3029 
3030   /* Compare blocks of first def and last use.  ???? FIXME.  What if
3031      the reg-def and reg-use lists are not correctly ordered?  */
3032   return bb_def == bb_use ? bb_def : 0;
3033 }
3034 
3035 
3036 /* Return nonzero if REG used in multiple basic blocks.  */
3037 int
3038 df_reg_global_p (df, reg)
3039      struct df *df;
3040      rtx reg;
3041 {
3042   return df_regno_bb (df, REGNO (reg)) == 0;
3043 }
3044 
3045 
3046 /* Return total lifetime (in insns) of REG.  */
3047 int
3048 df_reg_lifetime (df, reg)
3049      struct df *df;
3050      rtx reg;
3051 {
3052   return df->regs[REGNO (reg)].lifetime;
3053 }
3054 
3055 
3056 /* Return nonzero if REG live at start of BB.  */
3057 int
3058 df_bb_reg_live_start_p (df, bb, reg)
3059      struct df *df ATTRIBUTE_UNUSED;
3060      basic_block bb;
3061      rtx reg;
3062 {
3063   struct bb_info *bb_info = DF_BB_INFO (df, bb);
3064 
3065 #ifdef ENABLE_CHECKING
3066   if (! bb_info->lr_in)
3067     abort ();
3068 #endif
3069 
3070   return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
3071 }
3072 
3073 
3074 /* Return nonzero if REG live at end of BB.  */
3075 int
3076 df_bb_reg_live_end_p (df, bb, reg)
3077      struct df *df ATTRIBUTE_UNUSED;
3078      basic_block bb;
3079      rtx reg;
3080 {
3081   struct bb_info *bb_info = DF_BB_INFO (df, bb);
3082 
3083 #ifdef ENABLE_CHECKING
3084   if (! bb_info->lr_out)
3085     abort ();
3086 #endif
3087 
3088   return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
3089 }
3090 
3091 
3092 /* Return -1 if the life of REG1 ends before that of REG2 begins, 1 if
3093    it begins after that of REG2 ends, or 0 if the lives overlap.  */
3094 int
3095 df_bb_regs_lives_compare (df, bb, reg1, reg2)
3096      struct df *df;
3097      basic_block bb;
3098      rtx reg1;
3099      rtx reg2;
3100 {
3101   unsigned int regno1 = REGNO (reg1);
3102   unsigned int regno2 = REGNO (reg2);
3103   struct ref *def1;
3104   struct ref *use1;
3105   struct ref *def2;
3106   struct ref *use2;
3107 
3108 
3109   /* The regs must be local to BB.  */
3110   if (df_regno_bb (df, regno1) != bb
3111       || df_regno_bb (df, regno2) != bb)
3112     abort ();
3113 
3114   def2 = df_bb_regno_first_def_find (df, bb, regno2);
3115   use1 = df_bb_regno_last_use_find (df, bb, regno1);
3116 
3117   if (DF_INSN_LUID (df, DF_REF_INSN (def2))
3118       > DF_INSN_LUID (df, DF_REF_INSN (use1)))
3119     return -1;
3120 
3121   def1 = df_bb_regno_first_def_find (df, bb, regno1);
3122   use2 = df_bb_regno_last_use_find (df, bb, regno2);
3123 
3124   if (DF_INSN_LUID (df, DF_REF_INSN (def1))
3125       > DF_INSN_LUID (df, DF_REF_INSN (use2)))
3126     return 1;
3127 
3128   return 0;
3129 }
3130 
3131 
3132 /* Return last use of REGNO within BB.  */
3133 static struct ref *
3134 df_bb_regno_last_use_find (df, bb, regno)
3135      struct df * df;
3136      basic_block bb ATTRIBUTE_UNUSED;
3137      unsigned int regno;
3138 {
3139   struct df_link *link;
3140 
3141   /* This assumes that the reg-use list is ordered such that for any
3142      BB, the last use is found first.  However, since the BBs are not
3143      ordered, the first use in the chain is not necessarily the last
3144      use in the function.  */
3145   for (link = df->regs[regno].uses; link; link = link->next)
3146     {
3147       struct ref *use = link->ref;
3148 
3149       if (DF_REF_BB (use) == bb)
3150 	return use;
3151     }
3152   return 0;
3153 }
3154 
3155 
3156 /* Return first def of REGNO within BB.  */
3157 static struct ref *
3158 df_bb_regno_first_def_find (df, bb, regno)
3159      struct df * df;
3160      basic_block bb ATTRIBUTE_UNUSED;
3161      unsigned int regno;
3162 {
3163   struct df_link *link;
3164 
3165   /* This assumes that the reg-def list is ordered such that for any
3166      BB, the first def is found first.  However, since the BBs are not
3167      ordered, the first def in the chain is not necessarily the first
3168      def in the function.  */
3169   for (link = df->regs[regno].defs; link; link = link->next)
3170     {
3171       struct ref *def = link->ref;
3172 
3173       if (DF_REF_BB (def) == bb)
3174 	return def;
3175     }
3176   return 0;
3177 }
3178 
3179 
3180 /* Return first use of REGNO inside INSN within BB.  */
3181 static struct ref *
3182 df_bb_insn_regno_last_use_find (df, bb, insn, regno)
3183      struct df * df;
3184      basic_block bb ATTRIBUTE_UNUSED;
3185      rtx insn;
3186      unsigned int regno;
3187 {
3188   unsigned int uid;
3189   struct df_link *link;
3190 
3191   uid = INSN_UID (insn);
3192 
3193   for (link = df->insns[uid].uses; link; link = link->next)
3194     {
3195       struct ref *use = link->ref;
3196 
3197       if (DF_REF_REGNO (use) == regno)
3198 	return use;
3199     }
3200 
3201   return 0;
3202 }
3203 
3204 
3205 /* Return first def of REGNO inside INSN within BB.  */
3206 static struct ref *
3207 df_bb_insn_regno_first_def_find (df, bb, insn, regno)
3208      struct df * df;
3209      basic_block bb ATTRIBUTE_UNUSED;
3210      rtx insn;
3211      unsigned int regno;
3212 {
3213   unsigned int uid;
3214   struct df_link *link;
3215 
3216   uid = INSN_UID (insn);
3217 
3218   for (link = df->insns[uid].defs; link; link = link->next)
3219     {
3220       struct ref *def = link->ref;
3221 
3222       if (DF_REF_REGNO (def) == regno)
3223 	return def;
3224     }
3225 
3226   return 0;
3227 }
3228 
3229 
3230 /* Return insn using REG if the BB contains only a single
3231    use and def of REG.  */
3232 rtx
3233 df_bb_single_def_use_insn_find (df, bb, insn, reg)
3234      struct df * df;
3235      basic_block bb;
3236      rtx insn;
3237      rtx reg;
3238 {
3239   struct ref *def;
3240   struct ref *use;
3241   struct df_link *du_link;
3242 
3243   def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));
3244 
3245   if (! def)
3246     abort ();
3247 
3248   du_link = DF_REF_CHAIN (def);
3249 
3250   if (! du_link)
3251     return NULL_RTX;
3252 
3253   use = du_link->ref;
3254 
3255   /* Check if def is dead.  */
3256   if (! use)
3257     return NULL_RTX;
3258 
3259   /* Check for multiple uses.  */
3260   if (du_link->next)
3261     return NULL_RTX;
3262 
3263   return DF_REF_INSN (use);
3264 }
3265 
3266 /* Functions for debugging/dumping dataflow information.  */
3267 
3268 
3269 /* Dump a def-use or use-def chain for REF to FILE.  */
3270 static void
3271 df_chain_dump (link, file)
3272      struct df_link *link;
3273      FILE *file;
3274 {
3275   fprintf (file, "{ ");
3276   for (; link; link = link->next)
3277     {
3278       fprintf (file, "%c%d ",
3279 	       DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3280 	       DF_REF_ID (link->ref));
3281     }
3282   fprintf (file, "}");
3283 }
3284 
3285 static void
3286 df_chain_dump_regno (link, file)
3287      struct df_link *link;
3288      FILE *file;
3289 {
3290   fprintf (file, "{ ");
3291   for (; link; link = link->next)
3292     {
3293       fprintf (file, "%c%d(%d) ",
3294 	       DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3295 	       DF_REF_ID (link->ref),
3296 	       DF_REF_REGNO (link->ref));
3297     }
3298   fprintf (file, "}");
3299 }
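
/* For a def d3 whose def-use chain holds uses u7 and u9, df_chain_dump
   prints "{ u7 u9 }"; df_chain_dump_regno additionally shows each
   ref's regno, e.g. "{ u7(5) u9(5) }" for uses of reg 5.  */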
3300 
3301 /* Dump dataflow info.  */
3302 void
3303 df_dump (df, flags, file)
3304      struct df *df;
3305      int flags;
3306      FILE *file;
3307 {
3308   unsigned int j;
3309   basic_block bb;
3310 
3311   if (! df || ! file)
3312     return;
3313 
3314   fprintf (file, "\nDataflow summary:\n");
3315   fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
3316 	   df->n_regs, df->n_defs, df->n_uses, df->n_bbs);
3317 
3318   if (flags & DF_RD)
3319     {
3320       basic_block bb;
3321 
3322       fprintf (file, "Reaching defs:\n");
3323       FOR_EACH_BB (bb)
3324 	{
3325 	  struct bb_info *bb_info = DF_BB_INFO (df, bb);
3326 
3327 	  if (! bb_info->rd_in)
3328 	    continue;
3329 
3330 	  fprintf (file, "bb %d in  \t", bb->index);
3331 	  dump_bitmap (file, bb_info->rd_in);
3332 	  fprintf (file, "bb %d gen \t", bb->index);
3333 	  dump_bitmap (file, bb_info->rd_gen);
3334 	  fprintf (file, "bb %d kill\t", bb->index);
3335 	  dump_bitmap (file, bb_info->rd_kill);
3336 	  fprintf (file, "bb %d out \t", bb->index);
3337 	  dump_bitmap (file, bb_info->rd_out);
3338 	}
3339     }
3340 
3341   if (flags & DF_UD_CHAIN)
3342     {
3343       fprintf (file, "Use-def chains:\n");
3344       for (j = 0; j < df->n_defs; j++)
3345 	{
3346 	  if (df->defs[j])
3347 	    {
3348 	      fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3349 		       j, DF_REF_BBNO (df->defs[j]),
3350 		       DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
3351 		       DF_REF_INSN_UID (df->defs[j]),
3352 		       DF_REF_REGNO (df->defs[j]));
3353 	      if (df->defs[j]->flags & DF_REF_READ_WRITE)
3354 		fprintf (file, "read/write ");
3355 	      df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
3356 	      fprintf (file, "\n");
3357 	    }
3358 	}
3359     }
3360 
3361   if (flags & DF_RU)
3362     {
3363       fprintf (file, "Reaching uses:\n");
3364       FOR_EACH_BB (bb)
3365 	{
3366 	  struct bb_info *bb_info = DF_BB_INFO (df, bb);
3367 
3368 	  if (! bb_info->ru_in)
3369 	    continue;
3370 
3371 	  fprintf (file, "bb %d in  \t", bb->index);
3372 	  dump_bitmap (file, bb_info->ru_in);
3373 	  fprintf (file, "bb %d gen \t", bb->index);
3374 	  dump_bitmap (file, bb_info->ru_gen);
3375 	  fprintf (file, "bb %d kill\t", bb->index);
3376 	  dump_bitmap (file, bb_info->ru_kill);
3377 	  fprintf (file, "bb %d out \t", bb->index);
3378 	  dump_bitmap (file, bb_info->ru_out);
3379 	}
3380     }
3381 
3382   if (flags & DF_DU_CHAIN)
3383     {
3384       fprintf (file, "Def-use chains:\n");
3385       for (j = 0; j < df->n_uses; j++)
3386 	{
3387 	  if (df->uses[j])
3388 	    {
3389 	      fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3390 		       j, DF_REF_BBNO (df->uses[j]),
3391 		       DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
3392 		       DF_REF_INSN_UID (df->uses[j]),
3393 		       DF_REF_REGNO (df->uses[j]));
3394 	      if (df->uses[j]->flags & DF_REF_READ_WRITE)
3395 		fprintf (file, "read/write ");
3396 	      df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
3397 	      fprintf (file, "\n");
3398 	    }
3399 	}
3400     }
3401 
3402   if (flags & DF_LR)
3403     {
3404       fprintf (file, "Live regs:\n");
3405       FOR_EACH_BB (bb)
3406 	{
3407 	  struct bb_info *bb_info = DF_BB_INFO (df, bb);
3408 
3409 	  if (! bb_info->lr_in)
3410 	    continue;
3411 
3412 	  fprintf (file, "bb %d in  \t", bb->index);
3413 	  dump_bitmap (file, bb_info->lr_in);
3414 	  fprintf (file, "bb %d use \t", bb->index);
3415 	  dump_bitmap (file, bb_info->lr_use);
3416 	  fprintf (file, "bb %d def \t", bb->index);
3417 	  dump_bitmap (file, bb_info->lr_def);
3418 	  fprintf (file, "bb %d out \t", bb->index);
3419 	  dump_bitmap (file, bb_info->lr_out);
3420 	}
3421     }
3422 
3423   if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
3424     {
3425       struct reg_info *reg_info = df->regs;
3426 
3427       fprintf (file, "Register info:\n");
3428       for (j = 0; j < df->n_regs; j++)
3429 	{
3430 	  if (((flags & DF_REG_INFO)
3431 	       && (reg_info[j].n_uses || reg_info[j].n_defs))
3432 	      || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
3433 	      || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
3434 	    {
3435 	      fprintf (file, "reg %d", j);
3436 	      if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
3437 		{
3438 		  basic_block bb = df_regno_bb (df, j);
3439 
3440 		  if (bb)
3441 		    fprintf (file, " bb %d", bb->index);
3442 		  else
3443 		    fprintf (file, " bb ?");
3444 		}
3445 	      if (flags & DF_REG_INFO)
3446 		{
3447 		  fprintf (file, " life %d", reg_info[j].lifetime);
3448 		}
3449 
3450 	      if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
3451 		{
3452 		  fprintf (file, " defs ");
3453 		  if (flags & DF_REG_INFO)
3454 		    fprintf (file, "%d ", reg_info[j].n_defs);
3455 		  if (flags & DF_RD_CHAIN)
3456 		    df_chain_dump (reg_info[j].defs, file);
3457 		}
3458 
3459 	      if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
3460 		{
3461 		  fprintf (file, " uses ");
3462 		  if (flags & DF_REG_INFO)
3463 		    fprintf (file, "%d ", reg_info[j].n_uses);
3464 		  if (flags & DF_RU_CHAIN)
3465 		    df_chain_dump (reg_info[j].uses, file);
3466 		}
3467 
3468 	      fprintf (file, "\n");
3469 	    }
3470 	}
3471     }
3472   fprintf (file, "\n");
3473 }
3474 
3475 
3476 void
3477 df_insn_debug (df, insn, file)
3478      struct df *df;
3479      rtx insn;
3480      FILE *file;
3481 {
3482   unsigned int uid;
3483   int bbi;
3484 
3485   uid = INSN_UID (insn);
3486   if (uid >= df->insn_size)
3487     return;
3488 
3489   if (df->insns[uid].defs)
3490     bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3491   else if (df->insns[uid].uses)
3492     bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3493   else
3494     bbi = -1;
3495 
3496   fprintf (file, "insn %d bb %d luid %d defs ",
3497 	   uid, bbi, DF_INSN_LUID (df, insn));
3498   df_chain_dump (df->insns[uid].defs, file);
3499   fprintf (file, " uses ");
3500   df_chain_dump (df->insns[uid].uses, file);
3501   fprintf (file, "\n");
3502 }
3503 
3504 void
3505 df_insn_debug_regno (df, insn, file)
3506      struct df *df;
3507      rtx insn;
3508      FILE *file;
3509 {
3510   unsigned int uid;
3511   int bbi;
3512 
3513   uid = INSN_UID (insn);
3514   if (uid >= df->insn_size)
3515     return;
3516 
3517   if (df->insns[uid].defs)
3518     bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3519   else if (df->insns[uid].uses)
3520     bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3521   else
3522     bbi = -1;
3523 
3524   fprintf (file, "insn %d bb %d luid %d defs ",
3525 	   uid, bbi, DF_INSN_LUID (df, insn));
3526   df_chain_dump_regno (df->insns[uid].defs, file);
3527   fprintf (file, " uses ");
3528   df_chain_dump_regno (df->insns[uid].uses, file);
3529   fprintf (file, "\n");
3530 }
3531 
3532 static void
3533 df_regno_debug (df, regno, file)
3534      struct df *df;
3535      unsigned int regno;
3536      FILE *file;
3537 {
3538   if (regno >= df->reg_size)
3539     return;
3540 
3541   fprintf (file, "reg %d life %d defs ",
3542 	   regno, df->regs[regno].lifetime);
3543   df_chain_dump (df->regs[regno].defs, file);
3544   fprintf (file, " uses ");
3545   df_chain_dump (df->regs[regno].uses, file);
3546   fprintf (file, "\n");
3547 }
3548 
3549 
3550 static void
3551 df_ref_debug (df, ref, file)
3552      struct df *df;
3553      struct ref *ref;
3554      FILE *file;
3555 {
3556   fprintf (file, "%c%d ",
3557 	   DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
3558 	   DF_REF_ID (ref));
3559   fprintf (file, "reg %d bb %d luid %d insn %d chain ",
3560 	   DF_REF_REGNO (ref),
3561 	   DF_REF_BBNO (ref),
3562 	   DF_INSN_LUID (df, DF_REF_INSN (ref)),
3563 	   INSN_UID (DF_REF_INSN (ref)));
3564   df_chain_dump (DF_REF_CHAIN (ref), file);
3565   fprintf (file, "\n");
3566 }
3567 
3568 
3569 void
3570 debug_df_insn (insn)
3571      rtx insn;
3572 {
3573   df_insn_debug (ddf, insn, stderr);
3574   debug_rtx (insn);
3575 }
3576 
3577 
3578 void
3579 debug_df_reg (reg)
3580      rtx reg;
3581 {
3582   df_regno_debug (ddf, REGNO (reg), stderr);
3583 }
3584 
3585 
3586 void
3587 debug_df_regno (regno)
3588      unsigned int regno;
3589 {
3590   df_regno_debug (ddf, regno, stderr);
3591 }
3592 
3593 
3594 void
3595 debug_df_ref (ref)
3596      struct ref *ref;
3597 {
3598   df_ref_debug (ddf, ref, stderr);
3599 }
3600 
3601 
3602 void
3603 debug_df_defno (defno)
3604      unsigned int defno;
3605 {
3606   df_ref_debug (ddf, ddf->defs[defno], stderr);
3607 }
3608 
3609 
3610 void
3611 debug_df_useno (useno)
3612      unsigned int useno;
3613 {
3614   df_ref_debug (ddf, ddf->uses[useno], stderr);
3615 }
3616 
3617 
3618 void
3619 debug_df_chain (link)
3620      struct df_link *link;
3621 {
3622   df_chain_dump (link, stderr);
3623   fputc ('\n', stderr);
3624 }
3625 
3626 /* Hybrid search algorithm from "Implementation Techniques for
3627    Efficient Data-Flow Analysis of Large Programs".  */
3628 static void
3629 hybrid_search_bitmap (block, in, out, gen, kill, dir,
3630 		      conf_op, transfun, visited, pending,
3631 		      data)
3632      basic_block block;
3633      bitmap *in, *out, *gen, *kill;
3634      enum df_flow_dir dir;
3635      enum df_confluence_op conf_op;
3636      transfer_function_bitmap transfun;
3637      sbitmap visited;
3638      sbitmap pending;
3639      void *data;
3640 {
3641   int changed;
3642   int i = block->index;
3643   edge e;
3644   basic_block bb = block;
3645   SET_BIT (visited, block->index);
3646   if (TEST_BIT (pending, block->index))
3647     {
3648       if (dir == FORWARD)
3649 	{
3650 	  /* Calculate <conf_op> of predecessor OUTs.  */
3651 	  bitmap_zero (in[i]);
3652 	  for (e = bb->pred; e != 0; e = e->pred_next)
3653 	    {
3654 	      if (e->src == ENTRY_BLOCK_PTR)
3655 		continue;
3656 	      switch (conf_op)
3657 		{
3658 		case UNION:
3659 		  bitmap_a_or_b (in[i], in[i], out[e->src->index]);
3660 		  break;
3661 		case INTERSECTION:
3662 		  bitmap_a_and_b (in[i], in[i], out[e->src->index]);
3663 		  break;
3664 		}
3665 	    }
3666 	}
3667       else
3668 	{
3669 	  /* Calculate <conf_op> of successor INs.  */
3670 	  bitmap_zero (out[i]);
3671 	  for (e = bb->succ; e != 0; e = e->succ_next)
3672 	    {
3673 	      if (e->dest == EXIT_BLOCK_PTR)
3674 		continue;
3675 	      switch (conf_op)
3676 		{
3677 		case UNION:
3678 		  bitmap_a_or_b (out[i], out[i], in[e->dest->index]);
3679 		  break;
3680 		case INTERSECTION:
3681 		  bitmap_a_and_b (out[i], out[i], in[e->dest->index]);
3682 		  break;
3683 		}
3684 	    }
3685 	}
3686       /* Common part */
3687       (*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
3688       RESET_BIT (pending, i);
3689       if (changed)
3690 	{
3691 	  if (dir == FORWARD)
3692 	    {
3693 	      for (e = bb->succ; e != 0; e = e->succ_next)
3694 		{
3695 		  if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3696 		    continue;
3697 		  SET_BIT (pending, e->dest->index);
3698 		}
3699 	    }
3700 	  else
3701 	    {
3702 	      for (e = bb->pred; e != 0; e = e->pred_next)
3703 		{
3704 		  if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
		    continue;
		  SET_BIT (pending, e->src->index);
		}
	    }
	}
    }
  if (dir == FORWARD)
    {
      for (e = bb->succ; e != 0; e = e->succ_next)
	{
	  if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
	    continue;
	  if (!TEST_BIT (visited, e->dest->index))
	    hybrid_search_bitmap (e->dest, in, out, gen, kill, dir,
				  conf_op, transfun, visited, pending,
				  data);
	}
    }
  else
    {
      for (e = bb->pred; e != 0; e = e->pred_next)
	{
	  if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
	    continue;
	  if (!TEST_BIT (visited, e->src->index))
	    hybrid_search_bitmap (e->src, in, out, gen, kill, dir,
				  conf_op, transfun, visited, pending,
				  data);
	}
    }
}
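

/* A minimal example of a transfer function with the signature that
   hybrid_search_bitmap expects.  This is an illustrative sketch
   (example_transfer_function is a hypothetical name, not part of the
   df interface): a reaching-definitions style forward problem
   computes out = gen | (in & ~kill) and reports through CHANGED
   whether OUT was modified, which tells the solver whether the
   successor blocks must be requeued.  */
static void
example_transfer_function (bb, changed, in, out, gen, kill, data)
     int bb ATTRIBUTE_UNUSED;
     int *changed;
     bitmap in, out, gen, kill;
     void *data ATTRIBUTE_UNUSED;
{
  /* bitmap_union_of_diff returns non-zero if OUT changed.  */
  *changed = bitmap_union_of_diff (out, gen, in, kill);
}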


/* Hybrid search for sbitmaps, rather than bitmaps.  */
static void
hybrid_search_sbitmap (block, in, out, gen, kill, dir,
		       conf_op, transfun, visited, pending,
		       data)
     basic_block block;
     sbitmap *in, *out, *gen, *kill;
     enum df_flow_dir dir;
     enum df_confluence_op conf_op;
     transfer_function_sbitmap transfun;
     sbitmap visited;
     sbitmap pending;
     void *data;
{
  int changed;
  int i = block->index;
  edge e;
  basic_block bb = block;
  SET_BIT (visited, block->index);
  if (TEST_BIT (pending, block->index))
    {
      if (dir == FORWARD)
	{
	  /* Calculate <conf_op> of the predecessors' out sets.  */
	  sbitmap_zero (in[i]);
	  for (e = bb->pred; e != 0; e = e->pred_next)
	    {
	      if (e->src == ENTRY_BLOCK_PTR)
		continue;
	      switch (conf_op)
		{
		case UNION:
		  sbitmap_a_or_b (in[i], in[i], out[e->src->index]);
		  break;
		case INTERSECTION:
		  sbitmap_a_and_b (in[i], in[i], out[e->src->index]);
		  break;
		}
	    }
	}
      else
	{
	  /* Calculate <conf_op> of the successors' in sets.  */
	  sbitmap_zero (out[i]);
	  for (e = bb->succ; e != 0; e = e->succ_next)
	    {
	      if (e->dest == EXIT_BLOCK_PTR)
		continue;
	      switch (conf_op)
		{
		case UNION:
		  sbitmap_a_or_b (out[i], out[i], in[e->dest->index]);
		  break;
		case INTERSECTION:
		  sbitmap_a_and_b (out[i], out[i], in[e->dest->index]);
		  break;
		}
	    }
	}
      /* Common part: apply the transfer function and, if the result
	 changed, requeue the affected neighbors.  */
      (*transfun) (i, &changed, in[i], out[i], gen[i], kill[i], data);
      RESET_BIT (pending, i);
      if (changed)
	{
	  if (dir == FORWARD)
	    {
	      for (e = bb->succ; e != 0; e = e->succ_next)
		{
		  if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
		    continue;
		  SET_BIT (pending, e->dest->index);
		}
	    }
	  else
	    {
	      for (e = bb->pred; e != 0; e = e->pred_next)
		{
		  if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
		    continue;
		  SET_BIT (pending, e->src->index);
		}
	    }
	}
    }
  if (dir == FORWARD)
    {
      for (e = bb->succ; e != 0; e = e->succ_next)
	{
	  if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
	    continue;
	  if (!TEST_BIT (visited, e->dest->index))
	    hybrid_search_sbitmap (e->dest, in, out, gen, kill, dir,
				   conf_op, transfun, visited, pending,
				   data);
	}
    }
  else
    {
      for (e = bb->pred; e != 0; e = e->pred_next)
	{
	  if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
	    continue;
	  if (!TEST_BIT (visited, e->src->index))
	    hybrid_search_sbitmap (e->src, in, out, gen, kill, dir,
				   conf_op, transfun, visited, pending,
				   data);
	}
    }
}


/* gen = GEN set.
   kill = KILL set.
   in, out = Filled in by function.
   blocks = Blocks to analyze.
   dir = Dataflow direction.
   conf_op = Confluence operation.
   transfun = Transfer function.
   order = Order to iterate in.  (Should map block numbers -> order.)
   data = Whatever you want.  It's passed to the transfer function.

   This function performs iterative bitvector dataflow, producing the
   in and out sets.  Even if you only want results for a small number
   of blocks, the in and out vectors must be large enough for *all*
   blocks, because changing one block might affect others.  However,
   only the blocks in BLOCKS are put on the initial worklist.

   For forward problems, you probably want to pass in a mapping of
   block number to rc_order (like df->inverse_rc_map).  */
void
iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
			    dir, conf_op, transfun, order, data)
     sbitmap *in, *out, *gen, *kill;
     bitmap blocks;
     enum df_flow_dir dir;
     enum df_confluence_op conf_op;
     transfer_function_sbitmap transfun;
     int *order;
     void *data;
{
  int i;
  fibheap_t worklist;
  basic_block bb;
  sbitmap visited, pending;

  pending = sbitmap_alloc (last_basic_block);
  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (pending);
  sbitmap_zero (visited);
  worklist = fibheap_new ();

  /* Seed the worklist with the requested blocks, prioritized by
     ORDER, and initialize their solution to the GEN set.  */
  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
  {
    fibheap_insert (worklist, order[i], (void *) (size_t) i);
    SET_BIT (pending, i);
    if (dir == FORWARD)
      sbitmap_copy (out[i], gen[i]);
    else
      sbitmap_copy (in[i], gen[i]);
  });

  while (sbitmap_first_set_bit (pending) != -1)
    {
      while (!fibheap_empty (worklist))
	{
	  i = (size_t) fibheap_extract_min (worklist);
	  bb = BASIC_BLOCK (i);
	  if (!TEST_BIT (visited, bb->index))
	    hybrid_search_sbitmap (bb, in, out, gen, kill, dir,
				   conf_op, transfun, visited, pending, data);
	}

      /* If blocks are still pending, run another pass over the
	 requested blocks with a fresh visited set.  */
      if (sbitmap_first_set_bit (pending) != -1)
	{
	  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
	  {
	    fibheap_insert (worklist, order[i], (void *) (size_t) i);
	  });
	  sbitmap_zero (visited);
	}
      else
	{
	  break;
	}
    }

  sbitmap_free (pending);
  sbitmap_free (visited);
  fibheap_delete (worklist);
}
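

/* Example of driving the solver above.  This is a hypothetical
   sketch (example_solve_forward_problem is not used anywhere): it
   solves a forward problem over every basic block, iterating in
   reverse completion order as suggested above.  IN, OUT, GEN and
   KILL are assumed to hold one sbitmap per basic block, indexed by
   block number.  */
static void
example_solve_forward_problem (df, in, out, gen, kill, transfun)
     struct df *df;
     sbitmap *in, *out, *gen, *kill;
     transfer_function_sbitmap transfun;
{
  bitmap blocks = BITMAP_XMALLOC ();
  basic_block bb;

  /* Analyze every block; a subset would also work, provided the in
     and out vectors still cover all blocks.  */
  FOR_EACH_BB (bb)
    bitmap_set_bit (blocks, bb->index);

  iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
			      FORWARD, UNION, transfun,
			      df->inverse_rc_map, NULL);
  BITMAP_XFREE (blocks);
}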


/* Exactly the same as iterative_dataflow_sbitmap, except that it
   works on bitmaps instead.  */
void
iterative_dataflow_bitmap (in, out, gen, kill, blocks,
			   dir, conf_op, transfun, order, data)
     bitmap *in, *out, *gen, *kill;
     bitmap blocks;
     enum df_flow_dir dir;
     enum df_confluence_op conf_op;
     transfer_function_bitmap transfun;
     int *order;
     void *data;
{
  int i;
  fibheap_t worklist;
  basic_block bb;
  sbitmap visited, pending;

  pending = sbitmap_alloc (last_basic_block);
  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (pending);
  sbitmap_zero (visited);
  worklist = fibheap_new ();

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
  {
    fibheap_insert (worklist, order[i], (void *) (size_t) i);
    SET_BIT (pending, i);
    if (dir == FORWARD)
      bitmap_copy (out[i], gen[i]);
    else
      bitmap_copy (in[i], gen[i]);
  });

  while (sbitmap_first_set_bit (pending) != -1)
    {
      while (!fibheap_empty (worklist))
	{
	  i = (size_t) fibheap_extract_min (worklist);
	  bb = BASIC_BLOCK (i);
	  if (!TEST_BIT (visited, bb->index))
	    hybrid_search_bitmap (bb, in, out, gen, kill, dir,
				  conf_op, transfun, visited, pending, data);
	}
      if (sbitmap_first_set_bit (pending) != -1)
	{
	  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
	  {
	    fibheap_insert (worklist, order[i], (void *) (size_t) i);
	  });
	  sbitmap_zero (visited);
	}
      else
	{
	  break;
	}
    }
  sbitmap_free (pending);
  sbitmap_free (visited);
  fibheap_delete (worklist);
}