xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/df-scan.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Scanning of rtl for dataflow analysis.
2    Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3    2008, 2009, 2010 Free Software Foundation, Inc.
4    Originally contributed by Michael P. Hayes
5              (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6    Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7              and Kenneth Zadeck (zadeck@naturalbridge.com).
8 
9 This file is part of GCC.
10 
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15 
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
19 for more details.
20 
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3.  If not see
23 <http://www.gnu.org/licenses/>.  */
24 
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "output.h"
36 #include "alloc-pool.h"
37 #include "flags.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "sbitmap.h"
41 #include "bitmap.h"
42 #include "timevar.h"
43 #include "tree.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
47 #include "tree-pass.h"
48 
/* Stack-allocated vectors of refs and of multiword-hardreg pointers;
   these are the element types used by struct df_collection_rec
   below.  */
DEF_VEC_P(df_ref);
DEF_VEC_ALLOC_P_STACK(df_ref);

#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)

typedef struct df_mw_hardreg *df_mw_hardreg_ptr;

DEF_VEC_P(df_mw_hardreg_ptr);
DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);

#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
  VEC_stack_alloc (df_mw_hardreg_ptr, alloc)

/* Fallbacks for targets that do not define these insn patterns, so
   the scanning code below can test them unconditionally.  */
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

/* Fallback: by default no register is treated as used by the
   epilogue.  */
#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif
75 
/* The following two macros free the vecs that hold either the refs or
   the mw refs.  They are a little tricky because a vec that holds 0
   elements is special and is not to be freed: it is the shared static
   null record (df_null_ref_rec / df_null_mw_rec below), recognizable
   because its first element is NULL.  A heap-allocated vec always has
   a non-null first element.  */
#define df_scan_free_ref_vec(V) \
  do { \
    if (V && *V) \
      free (V);  \
  } while (0)

/* Same as df_scan_free_ref_vec, for null-terminated vectors of
   struct df_mw_hardreg *.  */
#define df_scan_free_mws_vec(V) \
  do { \
    if (V && *V) \
      free (V);  \
  } while (0)
90 
/* The set of hard registers in eliminables[i].from. */

static HARD_REG_SET elim_reg_set;

/* A bundle of the scratch stack-allocated vectors into which the refs
   of a single insn or basic block are collected (see
   df_insn_refs_collect and friends) before being canonized and
   installed into the permanent structures.  */

struct df_collection_rec
{
  VEC(df_ref,stack) *def_vec;
  VEC(df_ref,stack) *use_vec;
  VEC(df_ref,stack) *eq_use_vec;
  VEC(df_mw_hardreg_ptr,stack) *mw_vec;
};

/* Shared empty null-terminated ref and mw vectors, used instead of a
   heap allocation when an insn or block has no refs (see
   df_ref_compress_rec and the df_scan_free_*_vec macros above).  */
static df_ref df_null_ref_rec[1];
static struct df_mw_hardreg * df_null_mw_rec[1];
108 
109 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
110 			   rtx, rtx *,
111 			   basic_block, struct df_insn_info *,
112 			   enum df_ref_type, int ref_flags,
113 			   int, int, enum machine_mode);
114 static void df_def_record_1 (struct df_collection_rec *, rtx,
115 			     basic_block, struct df_insn_info *,
116 			     int ref_flags);
117 static void df_defs_record (struct df_collection_rec *, rtx,
118 			    basic_block, struct df_insn_info *,
119 			    int ref_flags);
120 static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
121 			    rtx *, enum df_ref_type,
122 			    basic_block, struct df_insn_info *,
123 			    int ref_flags,
124 			    int, int, enum machine_mode);
125 
126 static df_ref df_ref_create_structure (enum df_ref_class,
127 				       struct df_collection_rec *, rtx, rtx *,
128 				       basic_block, struct df_insn_info *,
129 				       enum df_ref_type, int ref_flags,
130 				       int, int, enum machine_mode);
131 
132 static void df_insn_refs_collect (struct df_collection_rec*,
133 				  basic_block, struct df_insn_info *);
134 static void df_canonize_collection_rec (struct df_collection_rec *);
135 
136 static void df_get_regular_block_artificial_uses (bitmap);
137 static void df_get_eh_block_artificial_uses (bitmap);
138 
139 static void df_record_entry_block_defs (bitmap);
140 static void df_record_exit_block_uses (bitmap);
141 static void df_get_exit_block_use_set (bitmap);
142 static void df_get_entry_block_def_set (bitmap);
143 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
144 static void df_ref_chain_delete_du_chain (df_ref *);
145 static void df_ref_chain_delete (df_ref *);
146 
147 static void df_refs_add_to_chains (struct df_collection_rec *,
148 				   basic_block, rtx);
149 
150 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
151 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
152 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
153 static void df_install_ref (df_ref, struct df_reg_info *,
154 			    struct df_ref_info *, bool);
155 
156 static int df_ref_compare (const void *, const void *);
157 static int df_mw_compare (const void *, const void *);
158 
/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.

   NOTE(review): read through df_regs_ever_live_p in the dump code
   below; the accessors are presumably defined later in this file —
   confirm before relying on direct access.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
168 
169 /*----------------------------------------------------------------------------
170    SCANNING DATAFLOW PROBLEM
171 
172    There are several ways in which scanning looks just like the other
173    dataflow problems.  It shares the all the mechanisms for local info
174    as well as basic block info.  Where it differs is when and how often
175    it gets run.  It also has no need for the iterative solver.
176 ----------------------------------------------------------------------------*/
177 
/* Problem data for the scanning dataflow function.  All storage the
   scan problem hands out comes from these pools and obstacks, so it
   can be released en masse by df_scan_free_internal.  */
struct df_scan_problem_data
{
  alloc_pool ref_base_pool;        /* DF_REF_BASE class refs.  */
  alloc_pool ref_artificial_pool;  /* DF_REF_ARTIFICIAL class refs.  */
  alloc_pool ref_regular_pool;     /* DF_REF_REGULAR class refs.  */
  alloc_pool ref_extract_pool;     /* DF_REF_EXTRACT class refs.  */
  alloc_pool insn_pool;            /* struct df_insn_info.  */
  alloc_pool reg_pool;             /* struct df_reg_info.  */
  alloc_pool mw_reg_pool;          /* struct df_mw_hardreg.  */
  bitmap_obstack reg_bitmaps;      /* Obstack for register bitmaps.  */
  bitmap_obstack insn_bitmaps;     /* Obstack for insn-uid bitmaps.  */
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;
193 
194 
/* Internal function to shut down the scanning problem.  Frees every
   piece of storage owned by df_scan: the per-insn and per-block ref
   vectors, the organized def/use ref tables, the per-register info,
   the insn table, the block info, the scratch bitmaps, and finally
   the allocation pools and the problem data itself.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  basic_block bb;

  /* The vectors that hold the refs are not pool allocated because
     they come in many sizes.  This makes them impossible to delete
     all at once.  */
  for (i = 0; i < DF_INSN_SIZE(); i++)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
      /* Skip the insns that have no insn_info or have been
	 deleted.  */
      if (insn_info)
	{
	  df_scan_free_ref_vec (insn_info->defs);
	  df_scan_free_ref_vec (insn_info->uses);
	  df_scan_free_ref_vec (insn_info->eq_uses);
	  df_scan_free_mws_vec (insn_info->mw_hardregs);
	}
    }

  /* The artificial def/use vectors hang off the per-block info.  */
  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (bb_info)
	{
	  df_scan_free_ref_vec (bb_info->artificial_defs);
	  df_scan_free_ref_vec (bb_info->artificial_uses);
	}
    }

  /* Reset the organized def and use ref tables to the empty state.  */
  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  /* The per-register def/use/eq-use info arrays.  The df_reg_info
     structures they point to come from reg_pool, freed below.  */
  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE(df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  /* These bitmaps were allocated from the obstacks released below
     (see df_scan_alloc); free them explicitly so the pointers do not
     dangle.  */
  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->regular_block_artificial_uses);
  BITMAP_FREE (df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  BITMAP_FREE (df->insns_to_delete);
  BITMAP_FREE (df->insns_to_rescan);
  BITMAP_FREE (df->insns_to_notes_rescan);

  /* Finally release the pools and obstacks themselves; the refs,
     insn_infos and reg_infos go away with them.  */
  free_alloc_pool (df_scan->block_pool);
  free_alloc_pool (problem_data->ref_base_pool);
  free_alloc_pool (problem_data->ref_artificial_pool);
  free_alloc_pool (problem_data->ref_regular_pool);
  free_alloc_pool (problem_data->ref_extract_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
280 
281 
/* Set basic block info: install BB_INFO as the scan problem's
   per-block data for block INDEX.  */

static void
df_scan_set_bb_info (unsigned int index,
		     struct df_scan_bb_info *bb_info)
{
  /* Grow the block-info array first so slot INDEX is guaranteed to
     exist.  */
  df_grow_bb_info (df_scan);
  df_scan->block_info[index] = (void *) bb_info;
}
291 
292 
/* Free basic block info.  Deletes the scanning info of every insn in
   BB and the block's artificial defs/uses, then returns VBB_INFO to
   the block pool.  Installed as problem_SCAN's free-bb-info hook.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  if (bb_info)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    /* Delete the scanning info attached to INSN.  */
	    df_insn_delete (bb, INSN_UID (insn));
	}

      /* Re-fetch the block info in case the deletions above
	 invalidated it.  NOTE(review): when bb_index is out of range
	 the original VBB_INFO is used below — presumably the caller
	 guarantees it is still valid in that case; confirm.  */
      if (bb_index < df_scan->block_info_size)
	bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      df_ref_chain_delete_du_chain (bb_info->artificial_defs);
      df_ref_chain_delete_du_chain (bb_info->artificial_uses);
      df_ref_chain_delete (bb_info->artificial_defs);
      df_ref_chain_delete (bb_info->artificial_uses);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
      pool_free (df_scan->block_pool, bb_info);
    }
}
323 
324 
325 /* Allocate the problem data for the scanning problem.  This should be
326    called when the problem is created or when the entire function is to
327    be rescanned.  */
328 void
329 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
330 {
331   struct df_scan_problem_data *problem_data;
332   unsigned int insn_num = get_max_uid () + 1;
333   unsigned int block_size = 400;
334   basic_block bb;
335 
336   /* Given the number of pools, this is really faster than tearing
337      everything apart.  */
338   if (df_scan->problem_data)
339     df_scan_free_internal ();
340 
341   df_scan->block_pool
342     = create_alloc_pool ("df_scan_block pool",
343 			 sizeof (struct df_scan_bb_info),
344 			 block_size);
345 
346   problem_data = XNEW (struct df_scan_problem_data);
347   df_scan->problem_data = problem_data;
348   df_scan->computed = true;
349 
350   problem_data->ref_base_pool
351     = create_alloc_pool ("df_scan ref base",
352 			 sizeof (struct df_base_ref), block_size);
353   problem_data->ref_artificial_pool
354     = create_alloc_pool ("df_scan ref artificial",
355 			 sizeof (struct df_artificial_ref), block_size);
356   problem_data->ref_regular_pool
357     = create_alloc_pool ("df_scan ref regular",
358 			 sizeof (struct df_regular_ref), block_size);
359   problem_data->ref_extract_pool
360     = create_alloc_pool ("df_scan ref extract",
361 			 sizeof (struct df_extract_ref), block_size);
362   problem_data->insn_pool
363     = create_alloc_pool ("df_scan insn",
364 			 sizeof (struct df_insn_info), block_size);
365   problem_data->reg_pool
366     = create_alloc_pool ("df_scan reg",
367 			 sizeof (struct df_reg_info), block_size);
368   problem_data->mw_reg_pool
369     = create_alloc_pool ("df_scan mw_reg",
370 			 sizeof (struct df_mw_hardreg), block_size);
371 
372   bitmap_obstack_initialize (&problem_data->reg_bitmaps);
373   bitmap_obstack_initialize (&problem_data->insn_bitmaps);
374 
375   insn_num += insn_num / 4;
376   df_grow_reg_info ();
377 
378   df_grow_insn_info ();
379   df_grow_bb_info (df_scan);
380 
381   FOR_ALL_BB (bb)
382     {
383       unsigned int bb_index = bb->index;
384       struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
385       if (!bb_info)
386 	{
387 	  bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
388 	  df_scan_set_bb_info (bb_index, bb_info);
389 	}
390       bb_info->artificial_defs = NULL;
391       bb_info->artificial_uses = NULL;
392     }
393 
394   df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
395   df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
396   df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
397   df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
398   df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
399   df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
400   df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
401   df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
402   df_scan->optional_p = false;
403 }
404 
405 
406 /* Free all of the data associated with the scan problem.  */
407 
408 static void
409 df_scan_free (void)
410 {
411   if (df_scan->problem_data)
412     df_scan_free_internal ();
413 
414   if (df->blocks_to_analyze)
415     {
416       BITMAP_FREE (df->blocks_to_analyze);
417       df->blocks_to_analyze = NULL;
418     }
419 
420   free (df_scan);
421 }
422 
/* Dump the preamble for DF_SCAN dump.  Prints the global register
   sets, then per-register def/use/eq-use counts, then a summary line
   with the ref totals and the number of regular vs call insns.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;	/* Total defs.  */
  int ucount = 0;	/* Total uses.  */
  int ecount = 0;	/* Total eq (note) uses.  */
  int icount = 0;	/* Non-call insns.  */
  int ccount = 0;	/* Call insns.  */
  basic_block bb;
  rtx insn;

  fprintf (file, ";;  invalidated by call \t");
  df_print_regset (file, regs_invalidated_by_call_regset);
  fprintf (file, ";;  hardware regs used \t");
  df_print_regset (file, df->hardware_regs_used);
  fprintf (file, ";;  regular block artificial uses \t");
  df_print_regset (file, df->regular_block_artificial_uses);
  fprintf (file, ";;  eh block artificial uses \t");
  df_print_regset (file, df->eh_block_artificial_uses);
  fprintf (file, ";;  entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";;  exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";;  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);
  fprintf (file, "\n;;  ref usage \t");

  /* Print "r<N>={<d>d,<u>u,<e>e}" for every register with any refs,
     accumulating the totals as we go.  */
  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
	const char * sep = "";

	fprintf (file, "r%d={", i);
	if (DF_REG_DEF_COUNT (i))
	  {
	    fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
	    sep = ",";
	    dcount += DF_REG_DEF_COUNT (i);
	  }
	if (DF_REG_USE_COUNT (i))
	  {
	    fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
	    sep = ",";
	    ucount += DF_REG_USE_COUNT (i);
	  }
	if (DF_REG_EQ_USE_COUNT (i))
	  {
	    fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
	    ecount += DF_REG_EQ_USE_COUNT (i);
	  }
	fprintf (file, "} ");
      }

  /* Count the regular and call insns for the summary line.  */
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    ccount++;
	  else
	    icount++;
	}

  fprintf (file, "\n;;    total ref usage %d{%dd,%du,%de}"
		 " in %d{%d regular + %d call} insns.\n",
		 dcount + ucount + ecount, dcount, ucount, ecount,
		 icount + ccount, icount, ccount);
}
495 
/* Dump the bb_info for a given basic block: the artificial defs and
   uses attached to BB, if scanning info exists for it.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  /* Per-insn debug output; disabled by default because it is very
     verbose.  */
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	df_insn_debug (insn, false, file);
  }
#endif
}
520 
/* The df_problem descriptor for scanning.  Scanning is not an
   iterative dataflow problem, so all of the solver hooks are NULL;
   only allocation, freeing and the dump hooks are provided.  */
static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction (none: no iteration).  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};
546 
547 
/* Create the scanning DATAFLOW instance and add it to the existing
   instance of DF.  (Nothing is returned; the problem is registered on
   df's problem stack by df_add_problem.)  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
557 
558 
559 /*----------------------------------------------------------------------------
560    Storage Allocation Utilities
561 ----------------------------------------------------------------------------*/
562 
563 
564 /* First, grow the reg_info information.  If the current size is less than
565    the number of pseudos, grow to 25% more than the number of
566    pseudos.
567 
568    Second, assure that all of the slots up to max_reg_num have been
569    filled with reg_info structures.  */
570 
571 void
572 df_grow_reg_info (void)
573 {
574   unsigned int max_reg = max_reg_num ();
575   unsigned int new_size = max_reg;
576   struct df_scan_problem_data *problem_data
577     = (struct df_scan_problem_data *) df_scan->problem_data;
578   unsigned int i;
579 
580   if (df->regs_size < new_size)
581     {
582       new_size += new_size / 4;
583       df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
584       df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
585       df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
586 				    new_size);
587       df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
588       df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
589       df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
590       df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
591       df->regs_size = new_size;
592     }
593 
594   for (i = df->regs_inited; i < max_reg; i++)
595     {
596       struct df_reg_info *reg_info;
597 
598       reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
599       memset (reg_info, 0, sizeof (struct df_reg_info));
600       df->def_regs[i] = reg_info;
601       reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
602       memset (reg_info, 0, sizeof (struct df_reg_info));
603       df->use_regs[i] = reg_info;
604       reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
605       memset (reg_info, 0, sizeof (struct df_reg_info));
606       df->eq_use_regs[i] = reg_info;
607       df->def_info.begin[i] = 0;
608       df->def_info.count[i] = 0;
609       df->use_info.begin[i] = 0;
610       df->use_info.count[i] = 0;
611     }
612 
613   df->regs_inited = max_reg;
614 }
615 
616 
617 /* Grow the ref information.  */
618 
619 static void
620 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
621 {
622   if (ref_info->refs_size < new_size)
623     {
624       ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
625       memset (ref_info->refs + ref_info->refs_size, 0,
626 	      (new_size - ref_info->refs_size) *sizeof (df_ref));
627       ref_info->refs_size = new_size;
628     }
629 }
630 
631 
632 /* Check and grow the ref information if necessary.  This routine
633    guarantees total_size + BITMAP_ADDEND amount of entries in refs
634    array.  It updates ref_info->refs_size only and does not change
635    ref_info->total_size.  */
636 
637 static void
638 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
639 			    unsigned bitmap_addend)
640 {
641   if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
642     {
643       int new_size = ref_info->total_size + bitmap_addend;
644       new_size += ref_info->total_size / 4;
645       df_grow_ref_info (ref_info, new_size);
646     }
647 }
648 
649 
650 /* Grow the ref information.  If the current size is less than the
651    number of instructions, grow to 25% more than the number of
652    instructions.  */
653 
654 void
655 df_grow_insn_info (void)
656 {
657   unsigned int new_size = get_max_uid () + 1;
658   if (DF_INSN_SIZE () < new_size)
659     {
660       new_size += new_size / 4;
661       df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
662       memset (df->insns + df->insns_size, 0,
663 	      (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
664       DF_INSN_SIZE () = new_size;
665     }
666 }
667 
668 
669 
670 
671 /*----------------------------------------------------------------------------
672    PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
673 ----------------------------------------------------------------------------*/
674 
/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  Recomputes the artificial
   use sets, the special ENTRY/EXIT defs and uses, and then records
   refs for every regular block.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  /* Until the refs are re-collected below, no organized ref table
     exists.  */
  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);

  /* EH blocks also get all of the regular artificial uses.  */
  bitmap_ior_into (df->eh_block_artificial_uses,
		   df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}
707 
708 
709 /* Create a new ref of type DF_REF_TYPE for register REG at address
710    LOC within INSN of BB.  This function is only used externally.
711 
712    If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
713    DF_REF_ZERO_EXTRACT.  WIDTH, OFFSET and MODE are used to access the
714    fields if they were constants.  Otherwise they should be -1 if
715    those flags were set.  */
716 
717 df_ref
718 df_ref_create (rtx reg, rtx *loc, rtx insn,
719 	       basic_block bb,
720 	       enum df_ref_type ref_type,
721 	       int ref_flags,
722 	       int width, int offset, enum machine_mode mode)
723 {
724   df_ref ref;
725   struct df_reg_info **reg_info;
726   struct df_ref_info *ref_info;
727   df_ref *ref_rec;
728   df_ref **ref_rec_ptr;
729   unsigned int count = 0;
730   bool add_to_table;
731   enum df_ref_class cl;
732 
733   df_grow_reg_info ();
734 
735   /* You cannot hack artificial refs.  */
736   gcc_assert (insn);
737 
738   if (width != -1 || offset != -1)
739     cl = DF_REF_EXTRACT;
740   else if (loc)
741     cl = DF_REF_REGULAR;
742   else
743     cl = DF_REF_BASE;
744   ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
745                                  ref_type, ref_flags,
746 				 width, offset, mode);
747 
748   if (DF_REF_REG_DEF_P (ref))
749     {
750       reg_info = df->def_regs;
751       ref_info = &df->def_info;
752       ref_rec_ptr = &DF_INSN_DEFS (insn);
753       add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
754     }
755   else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
756     {
757       reg_info = df->eq_use_regs;
758       ref_info = &df->use_info;
759       ref_rec_ptr = &DF_INSN_EQ_USES (insn);
760       switch (ref_info->ref_order)
761 	{
762 	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
763 	case DF_REF_ORDER_BY_REG_WITH_NOTES:
764 	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
765 	  add_to_table = true;
766 	  break;
767 	default:
768 	  add_to_table = false;
769 	  break;
770 	}
771     }
772   else
773     {
774       reg_info = df->use_regs;
775       ref_info = &df->use_info;
776       ref_rec_ptr = &DF_INSN_USES (insn);
777       add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
778     }
779 
780   /* Do not add if ref is not in the right blocks.  */
781   if (add_to_table && df->analyze_subset)
782     add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
783 
784   df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
785 
786   if (add_to_table)
787     switch (ref_info->ref_order)
788       {
789       case DF_REF_ORDER_UNORDERED_WITH_NOTES:
790       case DF_REF_ORDER_BY_REG_WITH_NOTES:
791       case DF_REF_ORDER_BY_INSN_WITH_NOTES:
792 	ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
793 	break;
794       default:
795 	ref_info->ref_order = DF_REF_ORDER_UNORDERED;
796 	break;
797       }
798 
799   ref_rec = *ref_rec_ptr;
800   while (*ref_rec)
801     {
802       count++;
803       ref_rec++;
804     }
805 
806   ref_rec = *ref_rec_ptr;
807   if (count)
808     {
809       ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
810       *ref_rec_ptr = ref_rec;
811       ref_rec[count] = ref;
812       ref_rec[count+1] = NULL;
813       qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
814     }
815   else
816     {
817       df_ref *ref_rec = XNEWVEC (df_ref, 2);
818       ref_rec[0] = ref;
819       ref_rec[1] = NULL;
820       *ref_rec_ptr = ref_rec;
821     }
822 
823 #if 0
824   if (dump_file)
825     {
826       fprintf (dump_file, "adding ref ");
827       df_ref_debug (ref, dump_file);
828     }
829 #endif
830   /* By adding the ref directly, df_insn_rescan my not find any
831      differences even though the block will have changed.  So we need
832      to mark the block dirty ourselves.  */
833   if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
834     df_set_bb_dirty (bb);
835 
836   return ref;
837 }
838 
839 
840 
841 /*----------------------------------------------------------------------------
842    UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
843 ----------------------------------------------------------------------------*/
844 
845 static void
846 df_free_ref (df_ref ref)
847 {
848   struct df_scan_problem_data *problem_data
849     = (struct df_scan_problem_data *) df_scan->problem_data;
850 
851   switch (DF_REF_CLASS (ref))
852     {
853     case DF_REF_BASE:
854       pool_free (problem_data->ref_base_pool, ref);
855       break;
856 
857     case DF_REF_ARTIFICIAL:
858       pool_free (problem_data->ref_artificial_pool, ref);
859       break;
860 
861     case DF_REF_REGULAR:
862       pool_free (problem_data->ref_regular_pool, ref);
863       break;
864 
865     case DF_REF_EXTRACT:
866       pool_free (problem_data->ref_extract_pool, ref);
867       break;
868     }
869 }
870 
871 
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  Clears
   REF's slot in the organized ref table (if present), updates the
   per-register counts, splices REF out of its register chain, and
   finally returns it to its allocation pool.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  /* Find the reg_info that REF is chained on and the organized ref
     table (if any) that holds it.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	{
	  reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
	  /* Eq (note) uses are only in the table when the current
	     ordering includes notes.  */
	  switch (df->use_info.ref_order)
	    {
	    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	    case DF_REF_ORDER_BY_REG_WITH_NOTES:
	    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	      refs = df->use_info.refs;
	      break;
	    default:
	      break;
	    }
	}
      else
	{
	  reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
	  refs = df->use_info.refs;
	}
    }

  /* Clear REF's table slot.  When analyzing a subset, refs in blocks
     outside the subset were never installed in the table (see
     df_ref_create), so only clear slots for blocks in the subset.  */
  if (refs)
    {
      if (df->analyze_subset)
	{
	  if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
	    refs[id] = NULL;
	}
      else
	refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here. It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}
953 
954 
/* Remove REF from the NULL-terminated vector *VEC_PTR by shifting the
   following entries left over it.  If REF is the only entry, free the
   vector and install the shared empty record instead.  REF is assumed
   to be present in the vector.  */

static void
df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
{
  df_ref *vec = *vec_ptr;

  if (vec[1])
    {
      /* More than one entry: find REF, then slide everything after it
	 (including the NULL terminator) down one slot.  */
      while (*vec && *vec != ref)
	vec++;

      while (*vec)
	{
	  *vec = *(vec+1);
	  vec++;
	}
    }
  else
    {
      /* REF was the sole entry; replace the vector with the shared
	 empty record, which must never be freed itself.  */
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}
979 
980 
/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (df_ref ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  /* Remove REF from the vector it lives in: the artificial def/use
     vector of its block, or the appropriate vector of its insn
     (eq_uses for refs that come from notes).  */
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
	{
	  struct df_scan_bb_info *bb_info
	    = df_scan_get_bb_info (DF_REF_BBNO (ref));
	  df_ref_compress_rec (&bb_info->artificial_defs, ref);
	}
      else
	{
	  unsigned int uid = DF_REF_INSN_UID (ref);
	  struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
	  df_ref_compress_rec (&insn_rec->defs, ref);
	}
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
	{
	  struct df_scan_bb_info *bb_info
	    = df_scan_get_bb_info (DF_REF_BBNO (ref));
	  df_ref_compress_rec (&bb_info->artificial_uses, ref);
	}
      else
	{
	  unsigned int uid = DF_REF_INSN_UID (ref);
	  struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

	  if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	    df_ref_compress_rec (&insn_rec->eq_uses, ref);
	  else
	    df_ref_compress_rec (&insn_rec->uses, ref);
	}
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.
     NOTE(review): DF_REF_INSN looks like it assumes REF belongs to an
     insn; artificial refs (handled above) have no insn info --
     presumably callers only remove insn refs here; confirm.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}
1036 
1037 
1038 /* Create the insn record for INSN.  If there was one there, zero it
1039    out.  */
1040 
1041 struct df_insn_info *
1042 df_insn_create_insn_record (rtx insn)
1043 {
1044   struct df_scan_problem_data *problem_data
1045     = (struct df_scan_problem_data *) df_scan->problem_data;
1046   struct df_insn_info *insn_rec;
1047 
1048   df_grow_insn_info ();
1049   insn_rec = DF_INSN_INFO_GET (insn);
1050   if (!insn_rec)
1051     {
1052       insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1053       DF_INSN_INFO_SET (insn, insn_rec);
1054     }
1055   memset (insn_rec, 0, sizeof (struct df_insn_info));
1056   insn_rec->insn = insn;
1057   return insn_rec;
1058 }
1059 
1060 
1061 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain.  */
1062 
1063 static void
1064 df_ref_chain_delete_du_chain (df_ref *ref_rec)
1065 {
1066   while (*ref_rec)
1067     {
1068       df_ref ref = *ref_rec;
1069       /* CHAIN is allocated by DF_CHAIN. So make sure to
1070          pass df_scan instance for the problem.  */
1071       if (DF_REF_CHAIN (ref))
1072         df_chain_unlink (ref);
1073       ref_rec++;
1074     }
1075 }
1076 
1077 
1078 /* Delete all refs in the ref chain.  */
1079 
1080 static void
1081 df_ref_chain_delete (df_ref *ref_rec)
1082 {
1083   df_ref *start = ref_rec;
1084   while (*ref_rec)
1085     {
1086       df_reg_chain_unlink (*ref_rec);
1087       ref_rec++;
1088     }
1089 
1090   /* If the list is empty, it has a special shared element that is not
1091      to be deleted.  */
1092   if (*start)
1093     free (start);
1094 }
1095 
1096 
1097 /* Delete the hardreg chain.  */
1098 
1099 static void
1100 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1101 {
1102   struct df_scan_problem_data *problem_data;
1103 
1104   if (!hardregs)
1105     return;
1106 
1107   problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1108 
1109   while (*hardregs)
1110     {
1111       pool_free (problem_data->mw_reg_pool, *hardregs);
1112       hardregs++;
1113     }
1114 }
1115 
1116 
/* Delete all of the refs information from INSN.  BB must be passed in
   except when called from df_process_deferred_rescans to mark the block
   as dirty.  */

void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (insn_info)
	{
	  /* Deletion supersedes any queued rescan of this insn.  */
	  bitmap_clear_bit (df->insns_to_rescan, uid);
	  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
	  bitmap_set_bit (df->insns_to_delete, uid);
	}
      if (dump_file)
	fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  /* The insn is deleted right now, so drop any deferred action.  */
  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
	 initialized.  However, combine deletes insns by changing them
	 to notes.  How clever.  So we cannot just check if it is a
	 valid insn before short circuiting this code, we need to see
	 if we actually initialized it.  */
      if (insn_info->defs)
	{
	  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

	  /* Tear down the du/ud chains before freeing the refs they
	     hang off of.  */
	  if (df_chain)
	    {
	      df_ref_chain_delete_du_chain (insn_info->defs);
	      df_ref_chain_delete_du_chain (insn_info->uses);
	      df_ref_chain_delete_du_chain (insn_info->eq_uses);
	    }

	  df_ref_chain_delete (insn_info->defs);
	  df_ref_chain_delete (insn_info->uses);
	  df_ref_chain_delete (insn_info->eq_uses);
	}
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}
1188 
1189 
/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_ref ref;
  struct df_mw_hardreg *mw;

  /* Return every collected ref and multiword-hardreg record to the
     scan problem's allocation pools...  */
  for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
    df_free_ref (ref);
  for (ix = 0; VEC_iterate (df_ref, collection_rec->use_vec, ix, ref); ++ix)
    df_free_ref (ref);
  for (ix = 0; VEC_iterate (df_ref, collection_rec->eq_use_vec, ix, ref); ++ix)
    df_free_ref (ref);
  for (ix = 0;
       VEC_iterate (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw);
       ++ix)
    pool_free (problem_data->mw_reg_pool, mw);

  /* ... and release the stack vectors themselves.  */
  VEC_free (df_ref, stack, collection_rec->def_vec);
  VEC_free (df_ref, stack, collection_rec->use_vec);
  VEC_free (df_ref, stack, collection_rec->eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
}
1217 
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  /* Nothing to do if dataflow is not initialized or INSN is not a
     real insn.  */
  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
	fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      /* Give a brand-new insn an empty record now so later queries do
	 not trip over a missing insn_info.  */
      if (!insn_info)
	{
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = df_null_ref_rec;
	  insn_info->uses = df_null_ref_rec;
	  insn_info->eq_uses = df_null_ref_rec;
	  insn_info->mw_hardregs = df_null_mw_rec;
	}
      if (dump_file)
	fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      /* Queueing a rescan supersedes any pending deletion or notes
	 rescan of this insn.  */
      bitmap_clear_bit (df->insns_to_delete, uid);
      bitmap_clear_bit (df->insns_to_notes_rescan, uid);
      bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false. */
      if (the_same)
	{
	  df_free_collection_rec (&collection_rec);
	  if (dump_file)
	    fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
	 Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      /* First time this insn is scanned: collect its refs from
	 scratch.  */
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn);
  if (DEBUG_INSN_P (insn))
    df_set_bb_dirty_nonlr (bb);
  else
    df_set_bb_dirty (bb);

  VEC_free (df_ref, stack, collection_rec.def_vec);
  VEC_free (df_ref, stack, collection_rec.use_vec);
  VEC_free (df_ref, stack, collection_rec.eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);

  return true;
}
1318 
/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  */

bool
df_insn_rescan_debug_internal (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  gcc_assert (DEBUG_INSN_P (insn)
	      && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  /* The insn's refs are dropped here, so no deferred action on it
     remains meaningful.  */
  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);

  /* A NULL defs field means the record was never initialized (see
     df_insn_delete for why that can happen).  */
  if (!insn_info->defs)
    return false;

  /* Nothing to delete if all the vectors are already the shared empty
     records.  */
  if (insn_info->defs == df_null_ref_rec
      && insn_info->uses == df_null_ref_rec
      && insn_info->eq_uses == df_null_ref_rec
      && insn_info->mw_hardregs == df_null_mw_rec)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  /* Tear down du/ud chains before freeing the refs they hang off.  */
  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  /* Leave the record in place but empty.  */
  insn_info->defs = df_null_ref_rec;
  insn_info->uses = df_null_ref_rec;
  insn_info->eq_uses = df_null_ref_rec;
  insn_info->mw_hardregs = df_null_mw_rec;

  return true;
}
1374 
1375 
1376 /* Rescan all of the insns in the function.  Note that the artificial
1377    uses and defs are not touched.  This function will destroy def-se
1378    or use-def chains.  */
1379 
1380 void
1381 df_insn_rescan_all (void)
1382 {
1383   bool no_insn_rescan = false;
1384   bool defer_insn_rescan = false;
1385   basic_block bb;
1386   bitmap_iterator bi;
1387   unsigned int uid;
1388   bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1389 
1390   if (df->changeable_flags & DF_NO_INSN_RESCAN)
1391     {
1392       df_clear_flags (DF_NO_INSN_RESCAN);
1393       no_insn_rescan = true;
1394     }
1395 
1396   if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1397     {
1398       df_clear_flags (DF_DEFER_INSN_RESCAN);
1399       defer_insn_rescan = true;
1400     }
1401 
1402   bitmap_copy (tmp, df->insns_to_delete);
1403   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1404     {
1405       struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1406       if (insn_info)
1407 	df_insn_delete (NULL, uid);
1408     }
1409 
1410   BITMAP_FREE (tmp);
1411   bitmap_clear (df->insns_to_delete);
1412   bitmap_clear (df->insns_to_rescan);
1413   bitmap_clear (df->insns_to_notes_rescan);
1414 
1415   FOR_EACH_BB (bb)
1416     {
1417       rtx insn;
1418       FOR_BB_INSNS (bb, insn)
1419 	{
1420 	  df_insn_rescan (insn);
1421 	}
1422     }
1423 
1424   if (no_insn_rescan)
1425     df_set_flags (DF_NO_INSN_RESCAN);
1426   if (defer_insn_rescan)
1427     df_set_flags (DF_DEFER_INSN_RESCAN);
1428 }
1429 
1430 
/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  /* Temporarily clear the flags that would suppress or re-defer the
     work, remembering which were set so they can be restored.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  /* Deletions first; iterate over a copy of each bitmap because the
     worker functions modify the originals.  */
  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_delete (NULL, uid);
    }

  bitmap_copy (tmp, df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (tmp, df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  /* Restore the caller's changeable flags.  */
  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
1502 
1503 
1504 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1505    the uses if INCLUDE_USES. Include the eq_uses if
1506    INCLUDE_EQ_USES.  */
1507 
1508 static unsigned int
1509 df_count_refs (bool include_defs, bool include_uses,
1510 	       bool include_eq_uses)
1511 {
1512   unsigned int regno;
1513   int size = 0;
1514   unsigned int m = df->regs_inited;
1515 
1516   for (regno = 0; regno < m; regno++)
1517     {
1518       if (include_defs)
1519 	size += DF_REG_DEF_COUNT (regno);
1520       if (include_uses)
1521 	size += DF_REG_USE_COUNT (regno);
1522       if (include_eq_uses)
1523 	size += DF_REG_EQ_USE_COUNT (regno);
1524     }
1525   return size;
1526 }
1527 
1528 
1529 /* Take build ref table for either the uses or defs from the reg-use
1530    or reg-def chains.  This version processes the refs in reg order
1531    which is likely to be best if processing the whole function.  */
1532 
1533 static void
1534 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1535 				  bool include_defs,
1536 				  bool include_uses,
1537 				  bool include_eq_uses)
1538 {
1539   unsigned int m = df->regs_inited;
1540   unsigned int regno;
1541   unsigned int offset = 0;
1542   unsigned int start;
1543 
1544   if (df->changeable_flags & DF_NO_HARD_REGS)
1545     {
1546       start = FIRST_PSEUDO_REGISTER;
1547       memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1548       memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1549     }
1550   else
1551     start = 0;
1552 
1553   ref_info->total_size
1554     = df_count_refs (include_defs, include_uses, include_eq_uses);
1555 
1556   df_check_and_grow_ref_info (ref_info, 1);
1557 
1558   for (regno = start; regno < m; regno++)
1559     {
1560       int count = 0;
1561       ref_info->begin[regno] = offset;
1562       if (include_defs)
1563 	{
1564 	  df_ref ref = DF_REG_DEF_CHAIN (regno);
1565 	  while (ref)
1566 	    {
1567 	      ref_info->refs[offset] = ref;
1568 	      DF_REF_ID (ref) = offset++;
1569 	      count++;
1570 	      ref = DF_REF_NEXT_REG (ref);
1571 	      gcc_assert (offset < ref_info->refs_size);
1572 	    }
1573 	}
1574       if (include_uses)
1575 	{
1576 	  df_ref ref = DF_REG_USE_CHAIN (regno);
1577 	  while (ref)
1578 	    {
1579 	      ref_info->refs[offset] = ref;
1580 	      DF_REF_ID (ref) = offset++;
1581 	      count++;
1582 	      ref = DF_REF_NEXT_REG (ref);
1583 	      gcc_assert (offset < ref_info->refs_size);
1584 	    }
1585 	}
1586       if (include_eq_uses)
1587 	{
1588 	  df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1589 	  while (ref)
1590 	    {
1591 	      ref_info->refs[offset] = ref;
1592 	      DF_REF_ID (ref) = offset++;
1593 	      count++;
1594 	      ref = DF_REF_NEXT_REG (ref);
1595 	      gcc_assert (offset < ref_info->refs_size);
1596 	    }
1597 	}
1598       ref_info->count[regno] = count;
1599     }
1600 
1601   /* The bitmap size is not decremented when refs are deleted.  So
1602      reset it now that we have squished out all of the empty
1603      slots.  */
1604   ref_info->table_size = offset;
1605 }
1606 
1607 
/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
				   bool include_defs,
				   bool include_uses,
				   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  /* Hard register refs are left out of the table when the client
     asked for pseudos only.  */
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  /* Pass 1: count the selected refs of each register in the blocks
     being analyzed, first the artificial refs of each block, then the
     refs of every insn.  */
  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
	for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
	  {
	    unsigned int regno = DF_REF_REGNO (*ref_rec);
	    ref_info->count[regno]++;
	  }
      if (include_uses)
	for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
	  {
	    unsigned int regno = DF_REF_REGNO (*ref_rec);
	    ref_info->count[regno]++;
	  }

      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    {
	      unsigned int uid = INSN_UID (insn);

	      if (include_defs)
		for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	      if (include_uses)
		for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	      if (include_eq_uses)
		for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	    }
	}
    }

  /* Turn the per-register counts into begin offsets (a prefix sum)
     and zero the counts for reuse as cursors in pass 2.
     NOTE(review): counts accumulated in pass 1 for hard registers
     below START are not reset here when DF_NO_HARD_REGS is in effect;
     presumably clients never consult those entries -- confirm.  */
  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  /* Pass 2: walk the same refs again, dropping each into its
     register's slice of the table and assigning its id.  */
  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
	for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
	  {
	    df_ref ref = *ref_rec;
	    unsigned int regno = DF_REF_REGNO (ref);
	    if (regno >= start)
	      {
		unsigned int id
		  = ref_info->begin[regno] + ref_info->count[regno]++;
		DF_REF_ID (ref) = id;
		ref_info->refs[id] = ref;
	      }
	  }
      if (include_uses)
	for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
	  {
	    df_ref ref = *ref_rec;
	    unsigned int regno = DF_REF_REGNO (ref);
	    if (regno >= start)
	      {
		unsigned int id
		  = ref_info->begin[regno] + ref_info->count[regno]++;
		DF_REF_ID (ref) = id;
		ref_info->refs[id] = ref;
	      }
	  }

      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    {
	      unsigned int uid = INSN_UID (insn);

	      if (include_defs)
		for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
		  {
		    df_ref ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	      if (include_uses)
		for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
		  {
		    df_ref ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	      if (include_eq_uses)
		for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
		  {
		    df_ref ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	    }
	}
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}
1775 
/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  Dispatches on whether only a subset of blocks
   is being analyzed: in that case the refs must be gathered by
   walking the insns of those blocks rather than the reg chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
			   bool include_defs,
			   bool include_uses,
			   bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
				       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
				       include_uses, include_eq_uses);
}
1792 
1793 
1794 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
1795 static unsigned int
1796 df_add_refs_to_table (unsigned int offset,
1797 		      struct df_ref_info *ref_info,
1798 		      df_ref *ref_vec)
1799 {
1800   while (*ref_vec)
1801     {
1802       df_ref ref = *ref_vec;
1803       if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1804 	  || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1805 	{
1806 	  ref_info->refs[offset] = ref;
1807 	  DF_REF_ID (*ref_vec) = offset++;
1808 	}
1809       ref_vec++;
1810     }
1811   return offset;
1812 }
1813 
1814 
1815 /* Count the number of refs in all of the insns of BB. Include the
1816    defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1817    eq_uses if INCLUDE_EQ_USES.  */
1818 
1819 static unsigned int
1820 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1821 			       struct df_ref_info *ref_info,
1822 			       bool include_defs, bool include_uses,
1823 			       bool include_eq_uses)
1824 {
1825   rtx insn;
1826 
1827   if (include_defs)
1828     offset = df_add_refs_to_table (offset, ref_info,
1829 				   df_get_artificial_defs (bb->index));
1830   if (include_uses)
1831     offset = df_add_refs_to_table (offset, ref_info,
1832 				   df_get_artificial_uses (bb->index));
1833 
1834   FOR_BB_INSNS (bb, insn)
1835     if (INSN_P (insn))
1836       {
1837 	unsigned int uid = INSN_UID (insn);
1838 	if (include_defs)
1839 	  offset = df_add_refs_to_table (offset, ref_info,
1840 					 DF_INSN_UID_DEFS (uid));
1841 	if (include_uses)
1842 	  offset = df_add_refs_to_table (offset, ref_info,
1843 					 DF_INSN_UID_USES (uid));
1844 	if (include_eq_uses)
1845 	  offset = df_add_refs_to_table (offset, ref_info,
1846 					 DF_INSN_UID_EQ_USES (uid));
1847       }
1848   return offset;
1849 }
1850 
1851 
1852 /* Organize the refs by insn into the table in REF_INFO.  If
1853    blocks_to_analyze is defined, use that set, otherwise the entire
1854    program.  Include the defs if INCLUDE_DEFS. Include the uses if
1855    INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES.  */
1856 
1857 static void
1858 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1859 			    bool include_defs, bool include_uses,
1860 			    bool include_eq_uses)
1861 {
1862   basic_block bb;
1863   unsigned int offset = 0;
1864 
1865   ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1866   df_check_and_grow_ref_info (ref_info, 1);
1867   if (df->blocks_to_analyze)
1868     {
1869       bitmap_iterator bi;
1870       unsigned int index;
1871 
1872       EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1873 	{
1874 	  offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1875 						  include_defs, include_uses,
1876 						  include_eq_uses);
1877 	}
1878 
1879       ref_info->table_size = offset;
1880     }
1881   else
1882     {
1883       FOR_ALL_BB (bb)
1884 	offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1885 						include_defs, include_uses,
1886 						include_eq_uses);
1887       ref_info->table_size = offset;
1888     }
1889 }
1890 
1891 
/* If the use refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  /* Already in the requested order; nothing to do.  */
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      /* The client wants no flat table at all; release it.  */
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      /* The unordered states are never requested explicitly; they only
	 arise as a side effect of other operations.  */
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
1932 
1933 
/* If the def refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  /* Already in the requested order; nothing to do.  */
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      /* The client wants no flat table at all; release it.  */
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      /* Defs never occur in notes, and the unordered states are never
	 requested explicitly.  */
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
1968 
1969 
1970 /* Change all of the basic block references in INSN to use the insn's
1971    current basic block.  This function is called from routines that move
1972    instructions from one block to another.  */
1973 
1974 void
1975 df_insn_change_bb (rtx insn, basic_block new_bb)
1976 {
1977   basic_block old_bb = BLOCK_FOR_INSN (insn);
1978   struct df_insn_info *insn_info;
1979   unsigned int uid = INSN_UID (insn);
1980 
1981   if (old_bb == new_bb)
1982     return;
1983 
1984   set_block_for_insn (insn, new_bb);
1985 
1986   if (!df)
1987     return;
1988 
1989   if (dump_file)
1990     fprintf (dump_file, "changing bb of uid %d\n", uid);
1991 
1992   insn_info = DF_INSN_UID_SAFE_GET (uid);
1993   if (insn_info == NULL)
1994     {
1995       if (dump_file)
1996 	fprintf (dump_file, "  unscanned insn\n");
1997       df_insn_rescan (insn);
1998       return;
1999     }
2000 
2001   if (!INSN_P (insn))
2002     return;
2003 
2004   df_set_bb_dirty (new_bb);
2005   if (old_bb)
2006     {
2007       if (dump_file)
2008 	fprintf (dump_file, "  from %d to %d\n",
2009 		 old_bb->index, new_bb->index);
2010       df_set_bb_dirty (old_bb);
2011     }
2012   else
2013     if (dump_file)
2014       fprintf (dump_file, "  to %d\n", new_bb->index);
2015 }
2016 
2017 
/* Helper function for df_ref_change_reg_with_loc.  Walk OLD_DF's
   reg_chain and move every non-artificial ref whose location is LOC
   over to NEW_DF's chain, renumbering it to NEW_REGNO.  Because the
   regno is a sort key of the per-insn ref vectors, each vector that
   contained a moved ref is re-sorted.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
			      struct df_reg_info *new_df,
			      int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      /* Artificial refs have no location, so only real refs that
	 actually point at LOC are rewritten.  */
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
	  && (DF_REF_LOC (the_ref))
	  && (*DF_REF_LOC (the_ref) == loc))
	{
	  df_ref next_ref = DF_REF_NEXT_REG (the_ref);
	  df_ref prev_ref = DF_REF_PREV_REG (the_ref);
	  df_ref *ref_vec, *ref_vec_t;
	  struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
	  unsigned int count = 0;

	  DF_REF_REGNO (the_ref) = new_regno;
	  DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

	  /* Pull the_ref out of the old regno chain.  */
	  if (prev_ref)
	    DF_REF_NEXT_REG (prev_ref) = next_ref;
	  else
	    old_df->reg_chain = next_ref;
	  if (next_ref)
	    DF_REF_PREV_REG (next_ref) = prev_ref;
	  old_df->n_refs--;

	  /* Put the ref into the new regno chain (pushed on the
	     front).  */
	  DF_REF_PREV_REG (the_ref) = NULL;
	  DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
	  if (new_df->reg_chain)
	    DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
	  new_df->reg_chain = the_ref;
	  new_df->n_refs++;
	  if (DF_REF_BB (the_ref))
	    df_set_bb_dirty (DF_REF_BB (the_ref));

	  /* Need to sort the record again that the ref was in because
	     the regno is a sorting key.  First, find the right
	     record.  Refs in REG_EQUIV/REG_EQUAL notes live in the
	     eq_uses vector; all other uses live in uses.  */
	  if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
	    ref_vec = insn_info->eq_uses;
	  else
	    ref_vec = insn_info->uses;
	  if (dump_file)
	    fprintf (dump_file, "changing reg in insn %d\n",
		     DF_REF_INSN_UID (the_ref));

	  ref_vec_t = ref_vec;

	  /* Find the length.  The vectors are NULL-terminated.  */
	  while (*ref_vec_t)
	    {
	      count++;
	      ref_vec_t++;
	    }
	  qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);

	  /* next_ref was captured before the unlink, so iteration
	     continues from the old chain position.  */
	  the_ref = next_ref;
	}
      else
	the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
2088 
2089 
2090 /* Change the regno of all refs that contained LOC from OLD_REGNO to
2091    NEW_REGNO.  Refs that do not match LOC are not changed which means
2092    that artificial refs are not changed since they have no loc.  This
2093    call is to support the SET_REGNO macro. */
2094 
2095 void
2096 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
2097 {
2098   if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2099     return;
2100 
2101   df_grow_reg_info ();
2102 
2103   df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2104 				DF_REG_DEF_GET (new_regno), new_regno, loc);
2105   df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2106 				DF_REG_USE_GET (new_regno), new_regno, loc);
2107   df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2108 				DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2109 }
2110 
2111 
2112 /* Delete the mw_hardregs that point into the eq_notes.  */
2113 
2114 static unsigned int
2115 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2116 {
2117   struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2118   unsigned int deleted = 0;
2119   unsigned int count = 0;
2120   struct df_scan_problem_data *problem_data
2121     = (struct df_scan_problem_data *) df_scan->problem_data;
2122 
2123   if (!*mw_vec)
2124     return 0;
2125 
2126   while (*mw_vec)
2127     {
2128       if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2129 	{
2130 	  struct df_mw_hardreg **temp_vec = mw_vec;
2131 
2132 	  pool_free (problem_data->mw_reg_pool, *mw_vec);
2133 	  temp_vec = mw_vec;
2134 	  /* Shove the remaining ones down one to fill the gap.  While
2135 	     this looks n**2, it is highly unusual to have any mw regs
2136 	     in eq_notes and the chances of more than one are almost
2137 	     non existent.  */
2138 	  while (*temp_vec)
2139 	    {
2140 	      *temp_vec = *(temp_vec + 1);
2141 	      temp_vec++;
2142 	    }
2143 	  deleted++;
2144 	}
2145       else
2146 	{
2147 	  mw_vec++;
2148 	  count++;
2149 	}
2150     }
2151 
2152   if (count == 0)
2153     {
2154       df_scan_free_mws_vec (insn_info->mw_hardregs);
2155       insn_info->mw_hardregs = df_null_mw_rec;
2156       return 0;
2157     }
2158   return deleted;
2159 }
2160 
2161 
/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  The defs
   and plain uses of the insn are left untouched; only the eq_uses and
   the note-resident mw_hardregs are rebuilt.  */

void
df_notes_rescan (rtx insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));

  /* The client has deferred rescanning.  Record the request in the
     pending bitmaps instead of scanning now.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  /* Create a placeholder record with empty ref vectors so the
	     deferred rescan has something to update.  */
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = df_null_ref_rec;
	  insn_info->uses = df_null_ref_rec;
	  insn_info->eq_uses = df_null_ref_rec;
	  insn_info->mw_hardregs = df_null_mw_rec;
	}

      bitmap_clear_bit (df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
	 be notes rescanned.  */
      if (!bitmap_bit_p (df->insns_to_rescan, uid))
	bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  /* Scanning now supersedes any pending delete/notes-rescan
     request.  */
  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int num_deleted;
      unsigned int mw_len;

      memset (&collection_rec, 0, sizeof (struct df_collection_rec));
      collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
      collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

      /* Throw away the old note-resident refs before collecting the
	 new ones.  */
      num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes */
      for (note = REG_NOTES (insn); note;
	   note = XEXP (note, 1))
	{
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_EQUIV:
	    case REG_EQUAL:
	      df_uses_record (DF_REF_REGULAR, &collection_rec,
			      &XEXP (note, 0), DF_REF_REG_USE,
			      bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
	    default:
	      break;
	    }
	}

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
      if (mw_len)
	{
	  unsigned int count = 0;
	  struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
	  /* Count the surviving (non-note) entries; the vector is
	     NULL-terminated.  */
	  while (*mw_rec)
	    {
	      count++;
	      mw_rec++;
	    }

	  if (count)
	    {
	      /* Append to the end of the existing record after
		 expanding it if necessary.  The slots vacated by
		 num_deleted entries can be reused without growing.  */
	      if (mw_len > num_deleted)
		{
		  insn_info->mw_hardregs =
		    XRESIZEVEC (struct df_mw_hardreg *,
				insn_info->mw_hardregs,
				count + 1 + mw_len);
		}
	      memcpy (&insn_info->mw_hardregs[count],
		      VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
		      mw_len * sizeof (struct df_mw_hardreg *));
	      insn_info->mw_hardregs[count + mw_len] = NULL;
	      qsort (insn_info->mw_hardregs, count + mw_len,
		     sizeof (struct df_mw_hardreg *), df_mw_compare);
	    }
	  else
	    {
	      /* No vector there. */
	      insn_info->mw_hardregs
		= XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
	      memcpy (insn_info->mw_hardregs,
		      VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
		      mw_len * sizeof (struct df_mw_hardreg *));
	      insn_info->mw_hardregs[mw_len] = NULL;
	    }
	}
      /* Get rid of the mw_rec so that df_refs_add_to_chains will
	 ignore it.  */
      VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
      df_refs_add_to_chains (&collection_rec, bb, insn);
      VEC_free (df_ref, stack, collection_rec.eq_use_vec);
    }
  else
    /* No record for the insn at all: fall back to a full rescan.  */
    df_insn_rescan (insn);

}
2293 
2294 
2295 /*----------------------------------------------------------------------------
2296    Hard core instruction scanning code.  No external interfaces here,
2297    just a lot of routines that look inside insns.
2298 ----------------------------------------------------------------------------*/
2299 
2300 
/* Return true if the contents of two df_ref's are identical.
   It ignores DF_REF_MARKER.  */

static bool
df_ref_equal_p (df_ref ref1, df_ref ref2)
{
  if (!ref2)
    return false;

  if (ref1 == ref2)
    return true;

  /* The marker and multiword bits are transient bookkeeping, so they
     are masked out of the flags comparison.  */
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
      || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
      || DF_REF_REG (ref1) != DF_REF_REG (ref2)
      || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
      || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
	  != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
      || DF_REF_BB (ref1) != DF_REF_BB (ref2)
      || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
    return false;

  switch (DF_REF_CLASS (ref1))
    {
    case DF_REF_ARTIFICIAL:
    case DF_REF_BASE:
      /* These classes carry no location, so the checks above
	 suffice.  */
      return true;

    case DF_REF_EXTRACT:
      /* Extract refs must also agree on the extract geometry before
	 the location check shared with regular refs below.  */
      if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
	  || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
	  || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
	return false;
      /* fallthru.  */

    case DF_REF_REGULAR:
      return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);

    default:
      gcc_unreachable ();
    }
  /* Not reached.  */
  return false;
}
2344 
2345 
/* Compare REF1 and REF2 for sorting (qsort comparator).  This is only
   called from places where all of the refs are of the same type, in
   the same insn, and have the same bb.  So these fields are not
   checked.  Ties among otherwise-equivalent refs are broken by the
   creation order (DF_REF_ORDER) so the sort is deterministic.  */

static int
df_ref_compare (const void *r1, const void *r2)
{
  const df_ref ref1 = *(const df_ref *)r1;
  const df_ref ref2 = *(const df_ref *)r2;

  if (ref1 == ref2)
    return 0;

  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  /* Same regno but different rtx: order by creation time.  */
  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them has is from
	 a mw and one is not, we need to have the one with the mw
	 first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
	  DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
	return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
	return -1;
      else
	return 1;
    }

  /* The classes are the same at this point so it is safe to only look
     at ref1.  Extract refs additionally order by their geometry.  */
  if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
    {
      if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
	return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
      if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
	return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
      if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
	return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
    }
  /* Final tie-break: creation order.  */
  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
2403 
2404 static void
2405 df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
2406 {
2407   df_ref tmp = VEC_index (df_ref, *ref_vec, i);
2408   VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
2409   VEC_replace (df_ref, *ref_vec, j, tmp);
2410 }
2411 
/* Sort and compress (deduplicate) a set of refs in *REF_VEC.
   Duplicate refs are freed and the vector is truncated to the number
   of distinct entries.  */

static void
df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;  /* Number of duplicates dropped so far.  */

  count = VEC_length (df_ref, *ref_vec);

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two elements: a single compare-and-swap sorts them.  */
      df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
      df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
      if (df_ref_compare (&r0, &r1) > 0)
        df_swap_refs (ref_vec, 0, 1);
    }
  else
    {
      /* Scan for the first adjacent pair that is not strictly
	 increasing.  */
      for (i = 0; i < count - 1; i++)
	{
	  df_ref r0 = VEC_index (df_ref, *ref_vec, i);
	  df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
	  if (df_ref_compare (&r0, &r1) >= 0)
	    break;
	}
      /* If the array is already strictly ordered,
         which is the most common case for large COUNT case
         (which happens for CALL INSNs),
         no need to sort and filter out duplicate.
         Simply return the count.
         Make sure DF_GET_ADD_REFS adds refs in the increasing order
         of DF_REF_COMPARE.  */
      if (i == count - 1)
        return;
      qsort (VEC_address (df_ref, *ref_vec), count, sizeof (df_ref),
	     df_ref_compare);
    }

  /* Compact the sorted vector: for each position, skip over and free
     any following entries equal to it, shifting survivors down.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
				VEC_index (df_ref, *ref_vec, i + dist + 1)))
	{
	  df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	VEC_replace (df_ref, *ref_vec, i + 1,
		     VEC_index (df_ref, *ref_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_ref, *ref_vec, count);
}
2474 
2475 
2476 /* Return true if the contents of two df_ref's are identical.
2477    It ignores DF_REF_MARKER.  */
2478 
2479 static bool
2480 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2481 {
2482   if (!mw2)
2483     return false;
2484   return (mw1 == mw2) ||
2485     (mw1->mw_reg == mw2->mw_reg
2486      && mw1->type == mw2->type
2487      && mw1->flags == mw2->flags
2488      && mw1->start_regno == mw2->start_regno
2489      && mw1->end_regno == mw2->end_regno);
2490 }
2491 
2492 
2493 /* Compare MW1 and MW2 for sorting.  */
2494 
2495 static int
2496 df_mw_compare (const void *m1, const void *m2)
2497 {
2498   const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2499   const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2500 
2501   if (mw1 == mw2)
2502     return 0;
2503 
2504   if (mw1->type != mw2->type)
2505     return mw1->type - mw2->type;
2506 
2507   if (mw1->flags != mw2->flags)
2508     return mw1->flags - mw2->flags;
2509 
2510   if (mw1->start_regno != mw2->start_regno)
2511     return mw1->start_regno - mw2->start_regno;
2512 
2513   if (mw1->end_regno != mw2->end_regno)
2514     return mw1->end_regno - mw2->end_regno;
2515 
2516   if (mw1->mw_reg != mw2->mw_reg)
2517     return mw1->mw_order - mw2->mw_order;
2518 
2519   return 0;
2520 }
2521 
2522 
/* Sort and compress (deduplicate) a set of mw_hardregs in *MW_VEC.
   Duplicates are returned to the mw_reg_pool and the vector is
   truncated to the number of distinct entries.  */

static void
df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
{
  unsigned int count;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  unsigned int dist = 0;  /* Number of duplicates dropped so far.  */

  count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
  /* 0 or 1 elements need no sorting.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two elements: a single compare-and-swap sorts them.  */
      struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
      struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
      if (df_mw_compare (&m0, &m1) > 0)
        {
          struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
						 *mw_vec, 0);
	  VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
		       VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
	  VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
        }
    }
  else
    qsort (VEC_address (df_mw_hardreg_ptr, *mw_vec), count,
	   sizeof (struct df_mw_hardreg *), df_mw_compare);

  /* Compact the sorted vector: for each position, skip over and free
     any following entries equal to it, shifting survivors down.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
			       VEC_index (df_mw_hardreg_ptr, *mw_vec,
					  i + dist + 1)))
	{
	  pool_free (problem_data->mw_reg_pool,
		     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
		     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
}
2575 
2576 
/* Sort and remove duplicates from the COLLECTION_REC.  Each of the
   four vectors is canonized independently; duplicate refs are freed
   and duplicate mw_hardregs are returned to their pool.  */

static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}
2587 
2588 
/* Add the new df_ref THIS_REF to the appropriate reg_info/ref_info
   chains: push it on the front of REG_INFO's doubly-linked reg_chain,
   and, when ADD_TO_TABLE, append it to REF_INFO's flat table (which
   assigns its DF_REF_ID).  */

static void
df_install_ref (df_ref this_ref,
		struct df_reg_info *reg_info,
		struct df_ref_info *ref_info,
		bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  /* Track how many live refs mention each hard register.  */
  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  /* A freshly created ref must not already be on some chain.  */
  gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL
	      && DF_REF_PREV_REG (this_ref) == NULL);

  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    /* Refs outside the table get an invalid id.  */
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
2635 
2636 
/* This function takes one of the groups of refs (defs, uses or
   eq_uses) collected in OLD_VEC and installs the entire group into the
   insn, returning a freshly allocated NULL-terminated array.  It also
   adds each of these refs into the appropriate chains.  BB is the
   block the refs belong to; IS_NOTES is true for refs that came from
   REG_EQUIV/REG_EQUAL notes.  */

static df_ref *
df_install_refs (basic_block bb,
		 VEC(df_ref,stack)* old_vec,
		 struct df_reg_info **reg_info,
		 struct df_ref_info *ref_info,
		 bool is_notes)
{
  unsigned int count;

  count = VEC_length (df_ref, old_vec);
  if (count)
    {
      df_ref *new_vec = XNEWVEC (df_ref, count + 1);
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Installing new refs invalidates any by-reg/by-insn ordering;
	 the table degrades to the corresponding unordered state.
	 Whether note refs enter the table depends on whether the
	 table tracks notes at all.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	  add_to_table = true;
	  break;
	case DF_REF_ORDER_UNORDERED:
	case DF_REF_ORDER_BY_REG:
	case DF_REF_ORDER_BY_INSN:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	  add_to_table = !is_notes;
	  break;
	default:
	  add_to_table = false;
	  break;
	}

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      for (ix = 0; VEC_iterate (df_ref, old_vec, ix, this_ref); ++ix)
	{
	  new_vec[ix] = this_ref;
	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
			  ref_info, add_to_table);
	}

      new_vec[count] = NULL;
      return new_vec;
    }
  else
    /* Empty group: share the canonical empty record.  */
    return df_null_ref_rec;
}
2694 
2695 
2696 /* This function takes the mws installs the entire group into the
2697    insn.  */
2698 
2699 static struct df_mw_hardreg **
2700 df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
2701 {
2702   unsigned int count;
2703 
2704   count = VEC_length (df_mw_hardreg_ptr, old_vec);
2705   if (count)
2706     {
2707       struct df_mw_hardreg **new_vec
2708 	= XNEWVEC (struct df_mw_hardreg*, count + 1);
2709       memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2710 	      sizeof (struct df_mw_hardreg*) * count);
2711       new_vec[count] = NULL;
2712       return new_vec;
2713     }
2714   else
2715     return df_null_mw_rec;
2716 }
2717 
2718 
/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  When INSN is
   non-NULL the vectors are installed on the insn's record; otherwise
   they are installed as BB's artificial defs/uses.  */

static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
		       basic_block bb, rtx insn)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
	 insn.  A null rec is a signal that the caller will handle the
	 chain specially.  */
      if (collection_rec->def_vec)
	{
	  /* The old vector is released before being replaced.  */
	  df_scan_free_ref_vec (insn_rec->defs);
	  insn_rec->defs
	    = df_install_refs (bb, collection_rec->def_vec,
			       df->def_regs,
			       &df->def_info, false);
	}
      if (collection_rec->use_vec)
	{
	  df_scan_free_ref_vec (insn_rec->uses);
	  insn_rec->uses
	    = df_install_refs (bb, collection_rec->use_vec,
			       df->use_regs,
			       &df->use_info, false);
	}
      if (collection_rec->eq_use_vec)
	{
	  /* eq_uses share the use table but are marked as notes.  */
	  df_scan_free_ref_vec (insn_rec->eq_uses);
	  insn_rec->eq_uses
	    = df_install_refs (bb, collection_rec->eq_use_vec,
			       df->eq_use_regs,
			       &df->use_info, true);
	}
      if (collection_rec->mw_vec)
	{
	  df_scan_free_mws_vec (insn_rec->mw_hardregs);
	  insn_rec->mw_hardregs
	    = df_install_mws (collection_rec->mw_vec);
	}
    }
  else
    {
      /* No insn: these are the block's artificial refs.  */
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      df_scan_free_ref_vec (bb_info->artificial_defs);
      bb_info->artificial_defs
	= df_install_refs (bb, collection_rec->def_vec,
			   df->def_regs,
			   &df->def_info, false);
      df_scan_free_ref_vec (bb_info->artificial_uses);
      bb_info->artificial_uses
	= df_install_refs (bb, collection_rec->use_vec,
			   df->use_regs,
			   &df->use_info, false);
    }
}
2779 
2780 
/* Allocate a ref of class CL from the matching allocation pool and
   initialize its fields.  REG is the register rtx, LOC its address
   within the insn (NULL for base/artificial refs), BB/INFO the
   containing block and insn, REF_TYPE/REF_FLAGS the kind of access.
   The new ref is also pushed onto COLLECTION_REC (if non-NULL), into
   the def, eq_use or use vector as appropriate.

   If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
   DF_REF_ZERO_EXTRACT.  WIDTH, OFFSET and MODE are used to access the fields
   if they were constants.  Otherwise they should be -1 if those flags
   were set.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
			 struct df_collection_rec *collection_rec,
			 rtx reg, rtx *loc,
			 basic_block bb, struct df_insn_info *info,
			 enum df_ref_type ref_type,
			 int ref_flags,
			 int width, int offset, enum machine_mode mode)
{
  df_ref this_ref = NULL;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Each class has its own pool because the classes have different
     sizes; only the classes with a location store LOC.  */
  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
      gcc_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
      this_ref->artificial_ref.bb = bb;
      gcc_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
      this_ref->regular_ref.loc = loc;
      gcc_assert (loc);
      break;

    case DF_REF_EXTRACT:
      this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
      DF_REF_EXTRACT_WIDTH (this_ref) = width;
      DF_REF_EXTRACT_OFFSET (this_ref) = offset;
      DF_REF_EXTRACT_MODE (this_ref) = mode;
      this_ref->regular_ref.loc = loc;
      gcc_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) =  regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using ond
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if ((regno < FIRST_PSEUDO_REGISTER)
      && (!DF_REF_IS_ARTIFICIAL (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
	{
	  if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
	    DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
	}
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  /* Route the new ref into the right collection vector: defs, note
     uses, or plain uses.  */
  if (collection_rec)
    {
      if (DF_REF_REG_DEF_P (this_ref))
	VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
	VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
      else
	VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
    }

  return this_ref;
}
2875 
2876 
/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  A hard register spanning several
   machine registers produces one ref per covered register, plus a
   df_mw_hardreg record tying them together.

   If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
   DF_REF_ZERO_EXTRACT.  WIDTH, OFFSET and MODE are used to access the
   fields if they were constants.  Otherwise they should be -1 if
   those flags were set.  */


static void
df_ref_record (enum df_ref_class cl,
	       struct df_collection_rec *collection_rec,
               rtx reg, rtx *loc,
	       basic_block bb, struct df_insn_info *insn_info,
	       enum df_ref_type ref_type,
	       int ref_flags,
	       int width, int offset, enum machine_mode mode)
{
  unsigned int regno;

  gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      /* Determine the [regno, endregno) range of machine registers
	 actually touched; a SUBREG may address only part of REG.  */
      if (GET_CODE (reg) == SUBREG)
	{
	  regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
					SUBREG_BYTE (reg), GET_MODE (reg));
	  endregno = regno + subreg_nregs (reg);
	}
      else
	endregno = END_HARD_REGNO (reg);

      /*  If this is a multiword hardreg, we create some extra
	  datastructures that will enable us to easily build REG_DEAD
	  and REG_UNUSED notes.  */
      if ((endregno != regno + 1) && insn_info)
	{
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (reg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;

	  hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->start_regno = regno;
	  hardreg->end_regno = endregno - 1;
	  hardreg->mw_order = df->ref_order++;
	  VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
			 hardreg);
	}

      /* One ref per covered machine register.  */
      for (i = regno; i < endregno; i++)
	{
	  ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
					 bb, insn_info, ref_type, ref_flags,
					 width, offset, mode);

          gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
	}
    }
  else
    {
      /* Pseudo register: exactly one ref.  */
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
			       ref_type, ref_flags, width, offset, mode);
    }
}
2955 
2956 
2957 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2958    covered by the outer mode is smaller than that covered by the inner mode,
2959    is a read-modify-write operation.
2960    This function returns true iff the SUBREG X is such a SUBREG.  */
2961 
2962 bool
2963 df_read_modify_subreg_p (rtx x)
2964 {
2965   unsigned int isize, osize;
2966   if (GET_CODE (x) != SUBREG)
2967     return false;
2968   isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2969   osize = GET_MODE_SIZE (GET_MODE (x));
2970   return isize > osize
2971 	 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2972 }
2973 
2974 
2975 /* Process all the registers defined in the rtx, X.
2976    Autoincrement/decrement definitions will be picked up by
2977    df_uses_record.  */
2978 
static void
df_def_record_1 (struct df_collection_rec *collection_rec,
                 rtx x, basic_block bb, struct df_insn_info *insn_info,
		 int flags)
{
  rtx *loc;
  rtx dst;
  /* WIDTH/OFFSET/MODE describe a constant ZERO_EXTRACT destination;
     they stay -1/VOIDmode when the destination is not such an extract.  */
  int offset = -1;
  int width = -1;
  enum machine_mode mode = VOIDmode;
  enum df_ref_class cl = DF_REF_REGULAR;

 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
     construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* It is legal to have a set destination be a parallel. */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;

      /* Recurse on each element; CLOBBER elements additionally get
	 DF_REF_MUST_CLOBBER.  */
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
	      || GET_CODE (temp) == SET)
	    df_def_record_1 (collection_rec,
                             temp, bb, insn_info,
			     GET_CODE (temp) == CLOBBER
			     ? flags | DF_REF_MUST_CLOBBER : flags);
	}
      return;
    }

  /* Strip a STRICT_LOW_PART wrapper; such a def only modifies part of
     the register, so it is also a read.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* Strip a ZERO_EXTRACT wrapper; when the extract position and width
     are compile-time constants, record them and use the EXTRACT ref
     class so the ref carries WIDTH/OFFSET/MODE.  */
  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;

      if (CONST_INT_P (XEXP (dst, 1))
	  && CONST_INT_P (XEXP (dst, 2)))
	{
	  width = INTVAL (XEXP (dst, 1));
	  offset = INTVAL (XEXP (dst, 2));
	  mode = GET_MODE (dst);
	  cl = DF_REF_EXTRACT;
	}

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
      df_ref_record (cl, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
		     width, offset, mode);

      /* We want to keep sp alive everywhere - by making all
	 writes to sp also use of sp. */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
	df_ref_record (DF_REF_BASE, collection_rec,
		       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
		       width, offset, mode);
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      /* A read-modify-write SUBREG def also reads the old value.  */
      if (df_read_modify_subreg_p (dst))
	flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;

      flags |= DF_REF_SUBREG;

      df_ref_record (cl, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
		     width, offset, mode);
    }
}
3068 
3069 
3070 /* Process all the registers defined in the pattern rtx, X.  */
3071 
3072 static void
3073 df_defs_record (struct df_collection_rec *collection_rec,
3074                 rtx x, basic_block bb, struct df_insn_info *insn_info,
3075 		int flags)
3076 {
3077   RTX_CODE code = GET_CODE (x);
3078 
3079   if (code == SET || code == CLOBBER)
3080     {
3081       /* Mark the single def within the pattern.  */
3082       int clobber_flags = flags;
3083       clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
3084       df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
3085     }
3086   else if (code == COND_EXEC)
3087     {
3088       df_defs_record (collection_rec, COND_EXEC_CODE (x),
3089 		      bb, insn_info, DF_REF_CONDITIONAL);
3090     }
3091   else if (code == PARALLEL)
3092     {
3093       int i;
3094 
3095       /* Mark the multiple defs within the pattern.  */
3096       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3097 	df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
3098     }
3099 }
3100 
3101 
3102 /* Process all the registers used in the rtx at address LOC.
3103 
3104    If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
3105    DF_REF_ZERO_EXTRACT.  WIDTH, OFFSET and MODE are used to access the
3106    fields if they were constants.  Otherwise they should be -1 if
3107    those flags were set.  */
3108 
static void
df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
		basic_block bb, struct df_insn_info *insn_info,
		int flags,
		int width, int offset, enum machine_mode mode)
{
  RTX_CODE code;
  rtx x;

 /* The walk restarts here (instead of recursing) when only operand 0
    of the current rtx remains to be scanned.  */
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and other leaf codes contain no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (cl, collection_rec,
			&XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE,
		        bb, insn_info,
			flags, width, offset, mode);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* The address of a loaded MEM is a use; only DF_REF_IN_NOTE is
	 propagated from the incoming flags.  */
      df_uses_record (cl, collection_rec,
		      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
		      bb, insn_info, flags & DF_REF_IN_NOTE,
		      width, offset, mode);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
			  width, offset, mode);
	  return;
	}
      /* ... Fall through ...  */

    case REG:
      /* A plain register (or SUBREG of a register) use.  */
      df_ref_record (cl, collection_rec,
		     x, loc, bb, insn_info,
		     ref_type, flags,
		     width, offset, mode);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
	/* If the parameters to the zero or sign extract are
	   constants, strip them off and recurse, otherwise there is
	   no information that we can gain from this operation.  */
	if (CONST_INT_P (XEXP (x, 1))
	    && CONST_INT_P (XEXP (x, 2)))
	  {
	    width = INTVAL (XEXP (x, 1));
	    offset = INTVAL (XEXP (x, 2));
	    mode = GET_MODE (x);

	    if (code == ZERO_EXTRACT)
	      flags |= DF_REF_ZERO_EXTRACT;
	    else
	      flags |= DF_REF_SIGN_EXTRACT;

	    df_uses_record (DF_REF_EXTRACT, collection_rec,
			    &XEXP (x, 0), ref_type, bb, insn_info, flags,
			    width, offset, mode);
	    return;
	  }
      }
      /* Non-constant extract parameters: fall out to the generic
	 operand scan at the bottom.  */
      break;

    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	/* The source side of a SET is always a use.  */
	df_uses_record (cl, collection_rec,
			&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
			width, offset, mode);

	/* The destination side only generates uses for the cases
	   below (read-modify-write SUBREGs, MEM addresses, partial
	   writes); plain REG/PARALLEL/SCRATCH/PC/CC0 dests are pure
	   defs and are handled by df_def_record_1.  */
	switch (GET_CODE (dst))
	  {
	    case SUBREG:
	      if (df_read_modify_subreg_p (dst))
		{
		  df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
				  DF_REF_REG_USE, bb, insn_info,
				  flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
				  width, offset, mode);
		  break;
		}
	      /* Fall through.  */
	    case REG:
	    case PARALLEL:
	    case SCRATCH:
	    case PC:
	    case CC0:
		break;
	    case MEM:
	      /* Storing to a MEM uses the registers in its address.  */
	      df_uses_record (cl, collection_rec, &XEXP (dst, 0),
			      DF_REF_REG_MEM_STORE, bb, insn_info, flags,
			      width, offset, mode);
	      break;
	    case STRICT_LOW_PART:
	      {
		rtx *temp = &XEXP (dst, 0);
		/* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
		dst = XEXP (dst, 0);
		df_uses_record (cl, collection_rec,
				(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
				DF_REF_REG_USE, bb, insn_info,
				DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
				width, offset, mode);
	      }
	      break;
	    case ZERO_EXTRACT:
	      {
		/* A ZERO_EXTRACT destination is a partial write, hence
		   also a use of the underlying object.  */
		if (CONST_INT_P (XEXP (dst, 1))
		    && CONST_INT_P (XEXP (dst, 2)))
		  {
		    width = INTVAL (XEXP (dst, 1));
		    offset = INTVAL (XEXP (dst, 2));
		    mode = GET_MODE (dst);
		    if (GET_CODE (XEXP (dst,0)) == MEM)
		      {
			/* Handle the case of zero_extract(mem(...)) in the set dest.
			   This special case is allowed only if the mem is a single byte and
			   is useful to set a bitfield in memory.  */
			df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (XEXP (dst,0), 0),
					DF_REF_REG_MEM_STORE, bb, insn_info,
					DF_REF_ZERO_EXTRACT,
					width, offset, mode);
		      }
		    else
		      {
			df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
					DF_REF_REG_USE, bb, insn_info,
					DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
					width, offset, mode);
		      }
		  }
		else
		  {
		    /* Non-constant position/width: the position and
		       width operands themselves are uses too.  */
		    df_uses_record (cl, collection_rec, &XEXP (dst, 1),
				    DF_REF_REG_USE, bb, insn_info, flags,
				    width, offset, mode);
		    df_uses_record (cl, collection_rec, &XEXP (dst, 2),
				    DF_REF_REG_USE, bb, insn_info, flags,
				    width, offset, mode);
		    df_uses_record (cl, collection_rec, &XEXP (dst, 0),
				    DF_REF_REG_USE, bb, insn_info,
				    DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
				    width, offset, mode);
		  }

	      }
	      break;

	    default:
	      gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  In global asm insns are
	   scanned and regs_asm_clobbered is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We can not just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn_info, flags,
			      width, offset, mode);
	    return;
	  }
	break;
      }

    case VAR_LOCATION:
      /* Debug insn payload: scan only the location expression.  */
      df_uses_record (cl, collection_rec,
		      &PAT_VAR_LOCATION_LOC (x),
		      DF_REF_REG_USE, bb, insn_info,
		      flags, width, offset, mode);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
		     bb, insn_info,
		     DF_REF_REG_DEF,
                     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
		     width, offset, mode);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
			    bb, insn_info, flags,
			    width, offset, mode);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (cl, collection_rec,
			      &XVECEXP (x, i, j), ref_type,
			      bb, insn_info, flags,
			      width, offset, mode);
	  }
      }
  }

  return;
}
3405 
3406 
3407 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses.  */
3408 
3409 static void
3410 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3411 {
3412   unsigned int ix;
3413   df_ref ref;
3414 
3415   for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
3416     {
3417       if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3418         {
3419 	  int width = -1;
3420 	  int offset = -1;
3421 	  enum machine_mode mode = VOIDmode;
3422           df_ref use;
3423 
3424 	  if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
3425 	    {
3426 	      width = DF_REF_EXTRACT_WIDTH (ref);
3427 	      offset = DF_REF_EXTRACT_OFFSET (ref);
3428 	      mode = DF_REF_EXTRACT_MODE (ref);
3429 	    }
3430 
3431           use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3432 					 DF_REF_LOC (ref), DF_REF_BB (ref),
3433 					 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3434 					 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
3435 					 width, offset, mode);
3436           DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3437         }
3438     }
3439 }
3440 
3441 
3442 /* Get call's extra defs and uses. */
3443 
static void
df_get_call_refs (struct df_collection_rec * collection_rec,
                  basic_block bb,
                  struct df_insn_info *insn_info,
                  int flags)
{
  rtx note;
  bitmap_iterator bi;
  unsigned int ui;
  bool is_sibling_call;
  unsigned int i;
  df_ref def;
  /* Registers for which a def has already been collected from the
     call pattern itself; they must not get duplicate clobber defs.  */
  bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);

  /* Do not generate clobbers for registers that are the result of the
     call.  This causes ordering problems in the chain building code
     depending on which def is seen first.  */
  for (i = 0; VEC_iterate (df_ref, collection_rec->def_vec, i, def); ++i)
    bitmap_set_bit (defs_generated, DF_REF_REGNO (def));

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
        df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
			DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
			VOIDmode);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	{
	  if (REG_P (XEXP (XEXP (note, 0), 0)))
	    {
	      unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
	      /* Skip clobbers of registers already defined by the
		 pattern (see defs_generated above).  */
	      if (!bitmap_bit_p (defs_generated, regno))
		df_defs_record (collection_rec, XEXP (note, 0), bb,
				insn_info, flags);
	    }
	  else
	    /* A clobber of a non-REG (e.g. a MEM): record the
	       registers used inside it.  */
	    df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
		            DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
			    VOIDmode);
	}
    }

  /* The stack ptr is used (honorarily) by a CALL insn.  */
  df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
		 NULL, bb, insn_info, DF_REF_REG_USE,
		 DF_REF_CALL_STACK_USAGE | flags,
		 -1, -1, VOIDmode);

  /* Calls may also reference any of the global registers,
     so they are recorded as used.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i])
      {
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1,
		       VOIDmode);
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1,
		       VOIDmode);
      }

  /* Add a MAY_CLOBBER def for every call-clobbered register that is
     not global, not already defined by the pattern, and (for sibling
     calls) not live out solely because the exit block uses it.  */
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
  EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
    {
      if (!global_regs[ui]
	  && (!bitmap_bit_p (defs_generated, ui))
	  && (!is_sibling_call
	      || !bitmap_bit_p (df->exit_block_uses, ui)
	      || refers_to_regno_p (ui, ui+1,
				    crtl->return_rtx, NULL)))
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
		       NULL, bb, insn_info, DF_REF_REG_DEF,
		       DF_REF_MAY_CLOBBER | flags,
		       -1, -1, VOIDmode);
    }

  BITMAP_FREE (defs_generated);
  return;
}
3526 
3527 /* Collect all refs in the INSN. This function is free of any
3528    side-effect - it will create and return a lists of df_ref's in the
3529    COLLECTION_REC without putting those refs into existing ref chains
3530    and reg chains. */
3531 
static void
df_insn_refs_collect (struct df_collection_rec* collection_rec,
		      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  VEC_truncate (df_ref, collection_rec->def_vec, 0);
  VEC_truncate (df_ref, collection_rec->use_vec, 0);
  VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
  VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);

  /* Record register defs.  */
  df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EQUIV:
        case REG_EQUAL:
          /* Uses inside notes are flagged DF_REF_IN_NOTE and end up in
             eq_use_vec rather than use_vec.  */
          df_uses_record (DF_REF_REGULAR, collection_rec,
                          &XEXP (note, 0), DF_REF_REG_USE,
                          bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
          break;
        case REG_NON_LOCAL_GOTO:
          /* The frame ptr is used by a non-local goto.  */
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#endif
          break;
        default:
          break;
        }
    }

  /* Calls generate extra defs (call-clobbered regs) and uses
     (argument registers, stack pointer).  */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info,
		      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);

  /* Record the register uses.  */
  df_uses_record (DF_REF_REGULAR, collection_rec,
		  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
		  -1, -1, VOIDmode);

  /* DF_REF_CONDITIONAL needs corresponding USES. */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  /* Sort and dedup the collected refs.  */
  df_canonize_collection_rec (collection_rec);
}
3593 
3594 /* Recompute the luids for the insns in BB.  */
3595 
3596 void
3597 df_recompute_luids (basic_block bb)
3598 {
3599   rtx insn;
3600   int luid = 0;
3601 
3602   df_grow_insn_info ();
3603 
3604   /* Scan the block an insn at a time from beginning to end.  */
3605   FOR_BB_INSNS (bb, insn)
3606     {
3607       struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3608       /* Inserting labels does not always trigger the incremental
3609 	 rescanning.  */
3610       if (!insn_info)
3611 	{
3612 	  gcc_assert (!INSN_P (insn));
3613 	  insn_info = df_insn_create_insn_record (insn);
3614 	}
3615 
3616       DF_INSN_INFO_LUID (insn_info) = luid;
3617       if (INSN_P (insn))
3618 	luid++;
3619     }
3620 }
3621 
3622 
3623 /* Collect all artificial refs at the block level for BB and add them
3624    to COLLECTION_REC.  */
3625 
static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
  /* Clear out the collection record.  */
  VEC_truncate (df_ref, collection_rec->def_vec, 0);
  VEC_truncate (df_ref, collection_rec->use_vec, 0);
  VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
  VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);

  /* The entry and exit blocks have precomputed def/use sets.  */
  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  else if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

#ifdef EH_RETURN_DATA_REGNO
  if (bb_has_eh_pred (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
	{
	  unsigned regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1,
			 VOIDmode);
	}
    }
#endif

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
		   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, VOIDmode);

  /* Add the artificial uses.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* EH landing pads get the EH variant of the artificial-use set.  */
      bitmap au = bb_has_eh_pred (bb)
	? df->eh_block_artificial_uses
	: df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
	{
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
	}
    }

  /* Sort and dedup the collected refs.  */
  df_canonize_collection_rec (collection_rec);
}
3686 
3687 
3688 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS.  */
3689 
void
df_bb_refs_record (int bb_index, bool scan_insns)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info;
  struct df_collection_rec collection_rec;

  /* If the df machinery is not active, there is nothing to record.  */
  if (!df)
    return;

  bb_info = df_scan_get_bb_info (bb_index);

  /* Need to make sure that there is a record in the basic block info. */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
      df_scan_set_bb_info (bb_index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scratch vectors reused for every insn in the block.  */
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  if (scan_insns)
    /* Scan the block an insn at a time from beginning to end.  */
    FOR_BB_INSNS (bb, insn)
      {
	struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
	gcc_assert (!insn_info);

	insn_info = df_insn_create_insn_record (insn);
	if (INSN_P (insn))
	  {
	    /* Record refs within INSN.  */
	    DF_INSN_INFO_LUID (insn_info) = luid++;
	    df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
	    df_refs_add_to_chains (&collection_rec, bb, insn);
	  }
	/* NOTE(review): for real insns this overwrites the luid assigned
	   just above with the post-increment value, which differs by one
	   from what df_recompute_luids would assign — confirm whether
	   that is intentional (luids are normally renumbered later).  */
	DF_INSN_INFO_LUID (insn_info) = luid;
      }

  /* Other block level artificial refs */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_add_to_chains (&collection_rec, bb, NULL);

  VEC_free (df_ref, stack, collection_rec.def_vec);
  VEC_free (df_ref, stack, collection_rec.use_vec);
  VEC_free (df_ref, stack, collection_rec.eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);

  /* Now that the block has been processed, set the block as dirty so
     LR and LIVE will get it processed.  */
  df_set_bb_dirty (bb);
}
3749 
3750 
3751 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3752    block. */
3753 
static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      /* After reload only the hard frame pointer (if in use) must be
	 forced live in every block.  */
      if (frame_pointer_needed)
	bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
	trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
	initialization and where they the saved registers are restored
	from them, including the cases where we don't reach the epilogue
	(noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}
3811 
3812 
3813 /* Get the artificial use set for an eh block. */
3814 
static void
df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
  bitmap_clear (eh_block_artificial_uses);

  /* The following code (down thru the arg_pointer setting APPEARS
     to be necessary because there is nothing that actually
     describes what the exception handling code may actually need
     to keep alive.  */
  if (reload_completed)
    {
      if (frame_pointer_needed)
	{
	  bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	  /* When soft and hard frame pointers differ, both must be
	     considered live at an EH landing pad.  */
	  bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif
	}
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* A fixed argument pointer may be needed to reload argument-area
	 equivalences inside the handler.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
#endif
    }
}
3839 
3840 
3841 
3842 /*----------------------------------------------------------------------------
3843    Specialized hard register scanning functions.
3844 ----------------------------------------------------------------------------*/
3845 
3846 
3847 /* Mark a register in SET.  Hard registers in large modes get all
3848    of their component registers set as well.  */
3849 
3850 static void
3851 df_mark_reg (rtx reg, void *vset)
3852 {
3853   bitmap set = (bitmap) vset;
3854   int regno = REGNO (reg);
3855 
3856   gcc_assert (GET_MODE (reg) != BLKmode);
3857 
3858   if (regno < FIRST_PSEUDO_REGISTER)
3859     {
3860       int n = hard_regno_nregs[regno][GET_MODE (reg)];
3861       bitmap_set_range (set, regno, n);
3862     }
3863   else
3864     bitmap_set_bit (set, regno);
3865 }
3866 
3867 
/* Set the bit for regs that are considered being defined at the entry. */

static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  /* Incoming argument registers are defined at the entry.  On targets
     that renumber them, record the incoming hard register number.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (FUNCTION_ARG_REGNO_P (i))
#ifdef INCOMING_REGNO
	bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
#else
	bitmap_set_bit (entry_block_defs, i);
#endif
    }

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
	 pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
	  bitmap_set_bit (entry_block_defs, i);
    }

  /* The register holding the address of an aggregate return value, if
     any, is defined on entry.  */
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = targetm.calls.static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
#endif
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  /* The incoming return address, when kept in a register, is defined
     at entry.  */
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

  /* Let the target add any other registers it considers defined on
     entry.  */
  targetm.live_on_entry (entry_block_defs);
}
3950 
3951 
/* Return the (conservative) set of hard registers that are defined on
   entry to the function.
   It uses df->entry_block_defs to determine which register
   reference to include.  */

static void
df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
			     bitmap entry_block_defs)
{
  unsigned int i;
  bitmap_iterator bi;

  /* Create an artificial def on the entry block for each register in
     the set.  */
  EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
    {
      df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
		     ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1,
		     VOIDmode);
    }

  /* Canonicalize the collected refs.  */
  df_canonize_collection_rec (collection_rec);
}
3973 
3974 
3975 /* Record the (conservative) set of hard registers that are defined on
3976    entry to the function.  */
3977 
3978 static void
3979 df_record_entry_block_defs (bitmap entry_block_defs)
3980 {
3981   struct df_collection_rec collection_rec;
3982   memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3983   collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3984   df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3985 
3986   /* Process bb_refs chain */
3987   df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3988   VEC_free (df_ref, stack, collection_rec.def_vec);
3989 }
3990 
3991 
/* Update the defs in the entry block.  Recomputes the entry def set
   and, if it differs from the cached df->entry_block_defs, rebuilds
   the artificial defs and marks the entry block dirty.  */

void
df_update_entry_block_defs (void)
{
  bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
  bool changed = false;

  df_get_entry_block_def_set (refs);
  if (df->entry_block_defs)
    {
      if (!bitmap_equal_p (df->entry_block_defs, refs))
	{
	  /* The set changed; delete the stale artificial defs so they
	     can be rebuilt below.  */
	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
	  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
	  df_ref_chain_delete (bb_info->artificial_defs);
	  bb_info->artificial_defs = NULL;
	  changed = true;
	}
    }
  else
    {
      /* First time through: allocate the cached bitmap.  */
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_entry_block_defs (refs);
      bitmap_copy (df->entry_block_defs, refs);
      df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
    }
  BITMAP_FREE (refs);
}
4028 
4029 
/* Set the bit for regs that are considered being used at the exit. */

static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;

  bitmap_clear (exit_block_uses);

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
	    && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  bitmap_set_bit (exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
	bitmap_set_bit (exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  /* The EH stack adjustment is live at exit until the epilogue has
     been generated.  */
  if ((!HAVE_epilogue || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  /* Likewise for the EH handler address.  */
  if ((!HAVE_epilogue || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
4116 
4117 
/* Return the refs of hard registers that are used in the exit block.
   It uses df->exit_block_uses to determine register to include.  */

static void
df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
{
  unsigned int i;
  bitmap_iterator bi;

  /* Create an artificial use on the exit block for each register in
     the set.  */
  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
		   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
  if (reload_completed
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
      && bb_has_eh_pred (EXIT_BLOCK_PTR)
      && fixed_regs[ARG_POINTER_REGNUM])
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
		   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#endif

  /* Canonicalize the collected refs.  */
  df_canonize_collection_rec (collection_rec);
}
4144 
4145 
4146 /* Record the set of hard registers that are used in the exit block.
4147    It uses df->exit_block_uses to determine which bit to include.  */
4148 
4149 static void
4150 df_record_exit_block_uses (bitmap exit_block_uses)
4151 {
4152   struct df_collection_rec collection_rec;
4153   memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4154   collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
4155 
4156   df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4157 
4158   /* Process bb_refs chain */
4159   df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
4160   VEC_free (df_ref, stack, collection_rec.use_vec);
4161 }
4162 
4163 
/* Update the uses in the exit block.  Recomputes the exit use set
   and, if it differs from the cached df->exit_block_uses, rebuilds
   the artificial uses and marks the exit block dirty.  */

void
df_update_exit_block_uses (void)
{
  bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
  bool changed = false;

  df_get_exit_block_use_set (refs);
  if (df->exit_block_uses)
    {
      if (!bitmap_equal_p (df->exit_block_uses, refs))
	{
	  /* The set changed; delete the stale artificial uses so they
	     can be rebuilt below.  */
	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
	  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
	  df_ref_chain_delete (bb_info->artificial_uses);
	  bb_info->artificial_uses = NULL;
	  changed = true;
	}
    }
  else
    {
      /* First time through: allocate the cached bitmap.  */
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_exit_block_uses (refs);
      bitmap_copy (df->exit_block_uses, refs);
      df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
    }
  BITMAP_FREE (refs);
}
4200 
/* True once df_hard_reg_init has run.  */
static bool initialized = false;
4202 
4203 
/* Initialize some platform specific structures.  */

void
df_hard_reg_init (void)
{
#ifdef ELIMINABLE_REGS
  int i;
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* This only needs to be done once per invocation.  */
  if (initialized)
    return;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  /* Without ELIMINABLE_REGS, only the frame pointer is eliminable.  */
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  initialized = true;
}
4229 
4230 
4231 /* Recompute the parts of scanning that are based on regs_ever_live
4232    because something changed in that array.  */
4233 
4234 void
4235 df_update_entry_exit_and_calls (void)
4236 {
4237   basic_block bb;
4238 
4239   df_update_entry_block_defs ();
4240   df_update_exit_block_uses ();
4241 
4242   /* The call insns need to be rescanned because there may be changes
4243      in the set of registers clobbered across the call.  */
4244   FOR_EACH_BB (bb)
4245     {
4246       rtx insn;
4247       FOR_BB_INSNS (bb, insn)
4248 	{
4249 	  if (INSN_P (insn) && CALL_P (insn))
4250 	    df_insn_rescan (insn);
4251 	}
4252     }
4253 }
4254 
4255 
4256 /* Return true if hard REG is actually used in the some instruction.
4257    There are a fair number of conditions that affect the setting of
4258    this array.  See the comment in df.h for df->hard_regs_live_count
4259    for the conditions that this array is set. */
4260 
4261 bool
4262 df_hard_reg_used_p (unsigned int reg)
4263 {
4264   return df->hard_regs_live_count[reg] != 0;
4265 }
4266 
4267 
4268 /* A count of the number of times REG is actually used in the some
4269    instruction.  There are a fair number of conditions that affect the
4270    setting of this array.  See the comment in df.h for
4271    df->hard_regs_live_count for the conditions that this array is
4272    set. */
4273 
4274 
4275 unsigned int
4276 df_hard_reg_used_count (unsigned int reg)
4277 {
4278   return df->hard_regs_live_count[reg];
4279 }
4280 
4281 
4282 /* Get the value of regs_ever_live[REGNO].  */
4283 
4284 bool
4285 df_regs_ever_live_p (unsigned int regno)
4286 {
4287   return regs_ever_live[regno];
4288 }
4289 
4290 
4291 /* Set regs_ever_live[REGNO] to VALUE.  If this cause regs_ever_live
4292    to change, schedule that change for the next update.  */
4293 
4294 void
4295 df_set_regs_ever_live (unsigned int regno, bool value)
4296 {
4297   if (regs_ever_live[regno] == value)
4298     return;
4299 
4300   regs_ever_live[regno] = value;
4301   if (df)
4302     df->redo_entry_and_exit = true;
4303 }
4304 
4305 
/* Compute "regs_ever_live" information from the underlying df
   information.  Set the vector to all false if RESET.  */

void
df_compute_regs_ever_live (bool reset)
{
  unsigned int i;
  bool changed = df->redo_entry_and_exit;

  if (reset)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  /* Turn on any hard reg that df has seen actually used.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
      {
	regs_ever_live[i] = true;
	changed = true;
      }
  /* The entry/exit sets and call insns depend on regs_ever_live, so
     refresh them if anything changed.  */
  if (changed)
    df_update_entry_exit_and_calls ();
  df->redo_entry_and_exit = false;
}
4328 
4329 
4330 /*----------------------------------------------------------------------------
4331   Dataflow ref information verification functions.
4332 
4333   df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4334   df_reg_chain_verify_unmarked (refs)
  df_refs_verify (VEC(df_ref,stack)*, ref*, bool)
4336   df_mws_verify (mw*, mw*, bool)
4337   df_insn_refs_verify (collection_rec, bb, insn, bool)
4338   df_bb_refs_verify (bb, refs, bool)
4339   df_bb_verify (bb)
4340   df_exit_block_bitmap_verify (bool)
4341   df_entry_block_bitmap_verify (bool)
4342   df_scan_verify ()
4343 ----------------------------------------------------------------------------*/
4344 
4345 
/* Mark all refs in the reg chain.  Verify that all of the registers
are in the correct chain.  Returns the number of refs seen in the
chain REFS for register REGNO.  */

static unsigned int
df_reg_chain_mark (df_ref refs, unsigned int regno,
		   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      /* No ref may appear in more than one chain.  */
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
	gcc_assert (DF_REF_REG_DEF_P (ref));
      else
	gcc_assert (!DF_REF_REG_DEF_P (ref));

      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      /* The prev pointer of the next ref must point back here.  */
      if (DF_REF_NEXT_REG (ref))
	gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}
4383 
4384 
4385 /* Verify that all of the registers in the chain are unmarked.  */
4386 
4387 static void
4388 df_reg_chain_verify_unmarked (df_ref refs)
4389 {
4390   df_ref ref;
4391   for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4392     gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4393 }
4394 
4395 
/* Verify that NEW_REC and OLD_REC have exactly the same members.
   If ABORT_IF_FAIL, abort on the first mismatch and unmark matched
   refs; otherwise return false on mismatch.  */

static bool
df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
		bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  for (ix = 0; VEC_iterate (df_ref, new_rec, ix, new_ref); ++ix)
    {
      if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* Abort if fail is called from the function level verifier.  If
	 that is the context, mark this reg as being seen.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
	  DF_REF_REG_UNMARK (*old_rec);
	}

      old_rec++;
    }

  /* Both sequences must end together.  */
  if (abort_if_fail)
    gcc_assert (*old_rec == NULL);
  else
    return *old_rec == NULL;
  return false;
}
4432 
4433 
/* Verify that NEW_REC and OLD_REC have exactly the same members.
   If ABORT_IF_FAIL, abort on the first mismatch; otherwise return
   false on mismatch.  */

static bool
df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
	       struct df_mw_hardreg **old_rec,
	       bool abort_if_fail)
{
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

  for (ix = 0; VEC_iterate (df_mw_hardreg_ptr, new_rec, ix, new_reg); ++ix)
    {
      if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}
      old_rec++;
    }

  /* Both sequences must end together.  */
  if (abort_if_fail)
    gcc_assert (*old_rec == NULL);
  else
    return *old_rec == NULL;
  return false;
}
4462 
4463 
/* Return true if the existing insn refs information is complete and
   correct. Otherwise (i.e. if there's any missing or extra refs),
   return the correct df_ref chain in REFS_RETURN.

   If ABORT_IF_FAIL, leave the refs that are verified (already in the
   ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
   verification mode instead of the whole function, so unmark
   everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
		     basic_block bb,
                     rtx insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  /* Recompute the refs for INSN into COLLECTION_REC and compare them
     against the cached chains below.  */
  df_insn_refs_collect (collection_rec, bb, insn_info);

  if (!DF_INSN_UID_DEFS (uid))
    {
      /* The insn_rec was created but it was never filled out.  */
      if (abort_if_fail)
	gcc_assert (0);
      else
	return false;
    }

  /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
  ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
			 abort_if_fail);
  ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
			 abort_if_fail);
  ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
			 abort_if_fail);
  ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
		       abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}
4508 
4509 
/* Return true if all refs in the basic block are correct and complete.
   Due to df_ref_chain_verify, it will cause all refs
   that are verified to have DF_REF_MARK bit set.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
        continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}
4546 
4547 
/* Returns true if the entry block has correct and complete df_ref set.
   If not it either aborts if ABORT_IF_FAIL is true or returns false.  */

static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
  bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
  bool is_eq;

  /* Recompute the entry def set and compare against the cached one.  */
  df_get_entry_block_def_set (entry_block_defs);

  is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);

  if (!is_eq && abort_if_fail)
    {
      /* Dump both sets for debugging before aborting.  */
      print_current_pass (stderr);
      fprintf (stderr, "entry_block_defs = ");
      df_print_regset (stderr, entry_block_defs);
      fprintf (stderr, "df->entry_block_defs = ");
      df_print_regset (stderr, df->entry_block_defs);
      gcc_assert (0);
    }

  BITMAP_FREE (entry_block_defs);

  return is_eq;
}
4575 
4576 
/* Returns true if the exit block has correct and complete df_ref set.
   If not it either aborts if ABORT_IF_FAIL is true or returns false. */

static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
  bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
  bool is_eq;

  /* Recompute the exit use set and compare against the cached one.  */
  df_get_exit_block_use_set (exit_block_uses);

  is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);

  if (!is_eq && abort_if_fail)
    {
      /* Dump both sets for debugging before aborting.  */
      print_current_pass (stderr);
      fprintf (stderr, "exit_block_uses = ");
      df_print_regset (stderr, exit_block_uses);
      fprintf (stderr, "df->exit_block_uses = ");
      df_print_regset (stderr, df->exit_block_uses);
      gcc_assert (0);
    }

  BITMAP_FREE (exit_block_uses);

  return is_eq;
}
4604 
4605 
/* Verify that the df_ref information for all insns in all blocks is
   correct and complete; aborts on the first inconsistency found.  */

void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
  bitmap regular_block_artificial_uses;
  bitmap eh_block_artificial_uses;

  if (!df)
    return;

  /* Verification is a 4 step process. */

  /* (1) All of the refs are marked by going thru the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
		  == DF_REG_DEF_COUNT(i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
		  == DF_REG_USE_COUNT(i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
		  == DF_REG_EQ_USE_COUNT(i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
  eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);

  df_get_regular_block_artificial_uses (regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (eh_block_artificial_uses);

  bitmap_ior_into (eh_block_artificial_uses,
		   regular_block_artificial_uses);

  /* Check artificial_uses bitmaps didn't change. */
  gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
			      df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
			      df->eh_block_artificial_uses));

  BITMAP_FREE (regular_block_artificial_uses);
  BITMAP_FREE (eh_block_artificial_uses);

  /* Verify entry block and exit block. These only verify the bitmaps,
     the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB (bb)
    df_bb_verify (bb);

  /* (4) See if all reg chains are traversed a second time.  This time
     a check is made that the marks are clear. A set mark would be a
     from a reg that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}
4679