1*c87b03e5Sespie /* Dataflow support routines.
2*c87b03e5Sespie Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3*c87b03e5Sespie Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
4*c87b03e5Sespie mhayes@redhat.com)
5*c87b03e5Sespie
6*c87b03e5Sespie This file is part of GCC.
7*c87b03e5Sespie
8*c87b03e5Sespie GCC is free software; you can redistribute it and/or modify it under
9*c87b03e5Sespie the terms of the GNU General Public License as published by the Free
10*c87b03e5Sespie Software Foundation; either version 2, or (at your option) any later
11*c87b03e5Sespie version.
12*c87b03e5Sespie
13*c87b03e5Sespie GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14*c87b03e5Sespie WARRANTY; without even the implied warranty of MERCHANTABILITY or
15*c87b03e5Sespie FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16*c87b03e5Sespie for more details.
17*c87b03e5Sespie
18*c87b03e5Sespie You should have received a copy of the GNU General Public License
19*c87b03e5Sespie along with GCC; see the file COPYING. If not, write to the Free
20*c87b03e5Sespie Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21*c87b03e5Sespie 02111-1307, USA.
22*c87b03e5Sespie
23*c87b03e5Sespie
24*c87b03e5Sespie OVERVIEW:
25*c87b03e5Sespie
26*c87b03e5Sespie This file provides some dataflow routines for computing reaching defs,
27*c87b03e5Sespie upward exposed uses, live variables, def-use chains, and use-def
28*c87b03e5Sespie chains. The global dataflow is performed using simple iterative
29*c87b03e5Sespie methods with a worklist and could be sped up by ordering the blocks
30*c87b03e5Sespie with a depth first search order.
31*c87b03e5Sespie
32*c87b03e5Sespie A `struct ref' data structure (ref) is allocated for every register
33*c87b03e5Sespie reference (def or use) and this records the insn and bb the ref is
34*c87b03e5Sespie found within. The refs are linked together in chains of uses and defs
35*c87b03e5Sespie for each insn and for each register. Each ref also has a chain field
36*c87b03e5Sespie that links all the use refs for a def or all the def refs for a use.
37*c87b03e5Sespie This is used to create use-def or def-use chains.
38*c87b03e5Sespie
39*c87b03e5Sespie
40*c87b03e5Sespie USAGE:
41*c87b03e5Sespie
42*c87b03e5Sespie Here's an example of using the dataflow routines.
43*c87b03e5Sespie
44*c87b03e5Sespie struct df *df;
45*c87b03e5Sespie
46*c87b03e5Sespie df = df_init ();
47*c87b03e5Sespie
48*c87b03e5Sespie df_analyse (df, 0, DF_ALL);
49*c87b03e5Sespie
50*c87b03e5Sespie df_dump (df, DF_ALL, stderr);
51*c87b03e5Sespie
52*c87b03e5Sespie df_finish (df);
53*c87b03e5Sespie
54*c87b03e5Sespie
55*c87b03e5Sespie df_init simply creates a poor man's object (df) that needs to be
56*c87b03e5Sespie passed to all the dataflow routines. df_finish destroys this
57*c87b03e5Sespie object and frees up any allocated memory.
58*c87b03e5Sespie
59*c87b03e5Sespie df_analyse performs the following:
60*c87b03e5Sespie
61*c87b03e5Sespie 1. Records defs and uses by scanning the insns in each basic block
62*c87b03e5Sespie or by scanning the insns queued by df_insn_modify.
63*c87b03e5Sespie 2. Links defs and uses into insn-def and insn-use chains.
64*c87b03e5Sespie 3. Links defs and uses into reg-def and reg-use chains.
65*c87b03e5Sespie 4. Assigns LUIDs to each insn (for modified blocks).
66*c87b03e5Sespie 5. Calculates local reaching definitions.
67*c87b03e5Sespie 6. Calculates global reaching definitions.
68*c87b03e5Sespie 7. Creates use-def chains.
69*c87b03e5Sespie 8. Calculates local reaching uses (upwards exposed uses).
70*c87b03e5Sespie 9. Calculates global reaching uses.
71*c87b03e5Sespie 10. Creates def-use chains.
72*c87b03e5Sespie 11. Calculates local live registers.
73*c87b03e5Sespie 12. Calculates global live registers.
74*c87b03e5Sespie 13. Calculates register lifetimes and determines local registers.
75*c87b03e5Sespie
76*c87b03e5Sespie
77*c87b03e5Sespie PHILOSOPHY:
78*c87b03e5Sespie
79*c87b03e5Sespie Note that the dataflow information is not updated for every newly
80*c87b03e5Sespie deleted or created insn. If the dataflow information requires
81*c87b03e5Sespie updating then all the changed, new, or deleted insns needs to be
82*c87b03e5Sespie marked with df_insn_modify (or df_insns_modify) either directly or
83*c87b03e5Sespie indirectly (say through calling df_insn_delete). df_insn_modify
84*c87b03e5Sespie marks all the modified insns to get processed the next time df_analyse
85*c87b03e5Sespie is called.
86*c87b03e5Sespie
87*c87b03e5Sespie Beware that tinkering with insns may invalidate the dataflow information.
88*c87b03e5Sespie The philosophy behind these routines is that once the dataflow
89*c87b03e5Sespie information has been gathered, the user should store what they require
90*c87b03e5Sespie before they tinker with any insn. Once a reg is replaced, for example,
91*c87b03e5Sespie then the reg-def/reg-use chains will point to the wrong place. Once a
92*c87b03e5Sespie whole lot of changes have been made, df_analyse can be called again
93*c87b03e5Sespie to update the dataflow information. Currently, this is not very smart
94*c87b03e5Sespie with regard to propagating changes to the dataflow so it should not
95*c87b03e5Sespie be called very often.
96*c87b03e5Sespie
97*c87b03e5Sespie
98*c87b03e5Sespie DATA STRUCTURES:
99*c87b03e5Sespie
100*c87b03e5Sespie The basic object is a REF (reference) and this may either be a DEF
101*c87b03e5Sespie (definition) or a USE of a register.
102*c87b03e5Sespie
103*c87b03e5Sespie These are linked into a variety of lists; namely reg-def, reg-use,
104*c87b03e5Sespie insn-def, insn-use, def-use, and use-def lists. For example,
105*c87b03e5Sespie the reg-def lists contain all the refs that define a given register
106*c87b03e5Sespie while the insn-use lists contain all the refs used by an insn.
107*c87b03e5Sespie
108*c87b03e5Sespie Note that the reg-def and reg-use chains are generally short (except for the
109*c87b03e5Sespie hard registers) and thus it is much faster to search these chains
110*c87b03e5Sespie rather than searching the def or use bitmaps.
111*c87b03e5Sespie
112*c87b03e5Sespie If the insns are in SSA form then the reg-def and use-def lists
113*c87b03e5Sespie should only contain the single defining ref.
114*c87b03e5Sespie
115*c87b03e5Sespie TODO:
116*c87b03e5Sespie
117*c87b03e5Sespie 1) Incremental dataflow analysis.
118*c87b03e5Sespie
119*c87b03e5Sespie Note that if a loop invariant insn is hoisted (or sunk), we do not
120*c87b03e5Sespie need to change the def-use or use-def chains. All we have to do is to
121*c87b03e5Sespie change the bb field for all the associated defs and uses and to
122*c87b03e5Sespie renumber the LUIDs for the original and new basic blocks of the insn.
123*c87b03e5Sespie
124*c87b03e5Sespie When shadowing loop mems we create new uses and defs for new pseudos
125*c87b03e5Sespie so we do not affect the existing dataflow information.
126*c87b03e5Sespie
127*c87b03e5Sespie My current strategy is to queue up all modified, created, or deleted
128*c87b03e5Sespie insns so when df_analyse is called we can easily determine all the new
129*c87b03e5Sespie or deleted refs. Currently the global dataflow information is
130*c87b03e5Sespie recomputed from scratch but this could be propagated more efficiently.
131*c87b03e5Sespie
132*c87b03e5Sespie 2) Improved global data flow computation using depth first search.
133*c87b03e5Sespie
134*c87b03e5Sespie 3) Reduced memory requirements.
135*c87b03e5Sespie
136*c87b03e5Sespie We could operate a pool of ref structures. When a ref is deleted it
137*c87b03e5Sespie gets returned to the pool (say by linking on to a chain of free refs).
138*c87b03e5Sespie This will require a pair of bitmaps for defs and uses so that we can
139*c87b03e5Sespie tell which ones have been changed. Alternatively, we could
140*c87b03e5Sespie periodically squeeze the def and use tables and associated bitmaps and
141*c87b03e5Sespie renumber the def and use ids.
142*c87b03e5Sespie
143*c87b03e5Sespie 4) Ordering of reg-def and reg-use lists.
144*c87b03e5Sespie
145*c87b03e5Sespie Should the first entry in the def list be the first def (within a BB)?
146*c87b03e5Sespie Similarly, should the first entry in the use list be the last use
147*c87b03e5Sespie (within a BB)?
148*c87b03e5Sespie
149*c87b03e5Sespie 5) Working with a sub-CFG.
150*c87b03e5Sespie
151*c87b03e5Sespie Often the whole CFG does not need to be analysed, for example,
152*c87b03e5Sespie when optimising a loop, only certain registers are of interest.
153*c87b03e5Sespie Perhaps there should be a bitmap argument to df_analyse to specify
154*c87b03e5Sespie which registers should be analysed? */
155*c87b03e5Sespie
156*c87b03e5Sespie #include "config.h"
157*c87b03e5Sespie #include "system.h"
158*c87b03e5Sespie #include "rtl.h"
159*c87b03e5Sespie #include "tm_p.h"
160*c87b03e5Sespie #include "insn-config.h"
161*c87b03e5Sespie #include "recog.h"
162*c87b03e5Sespie #include "function.h"
163*c87b03e5Sespie #include "regs.h"
164*c87b03e5Sespie #include "obstack.h"
165*c87b03e5Sespie #include "hard-reg-set.h"
166*c87b03e5Sespie #include "basic-block.h"
167*c87b03e5Sespie #include "sbitmap.h"
168*c87b03e5Sespie #include "bitmap.h"
169*c87b03e5Sespie #include "df.h"
170*c87b03e5Sespie #include "fibheap.h"
171*c87b03e5Sespie
172*c87b03e5Sespie #define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE) \
173*c87b03e5Sespie do \
174*c87b03e5Sespie { \
175*c87b03e5Sespie unsigned int node_; \
176*c87b03e5Sespie EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_, \
177*c87b03e5Sespie {(BB) = BASIC_BLOCK (node_); CODE;}); \
178*c87b03e5Sespie } \
179*c87b03e5Sespie while (0)
180*c87b03e5Sespie
181*c87b03e5Sespie static struct obstack df_ref_obstack;
182*c87b03e5Sespie static struct df *ddf;
183*c87b03e5Sespie
184*c87b03e5Sespie static void df_reg_table_realloc PARAMS((struct df *, int));
185*c87b03e5Sespie #if 0
186*c87b03e5Sespie static void df_def_table_realloc PARAMS((struct df *, int));
187*c87b03e5Sespie #endif
188*c87b03e5Sespie static void df_insn_table_realloc PARAMS((struct df *, unsigned int));
189*c87b03e5Sespie static void df_bitmaps_alloc PARAMS((struct df *, int));
190*c87b03e5Sespie static void df_bitmaps_free PARAMS((struct df *, int));
191*c87b03e5Sespie static void df_free PARAMS((struct df *));
192*c87b03e5Sespie static void df_alloc PARAMS((struct df *, int));
193*c87b03e5Sespie
194*c87b03e5Sespie static rtx df_reg_clobber_gen PARAMS((unsigned int));
195*c87b03e5Sespie static rtx df_reg_use_gen PARAMS((unsigned int));
196*c87b03e5Sespie
197*c87b03e5Sespie static inline struct df_link *df_link_create PARAMS((struct ref *,
198*c87b03e5Sespie struct df_link *));
199*c87b03e5Sespie static struct df_link *df_ref_unlink PARAMS((struct df_link **, struct ref *));
200*c87b03e5Sespie static void df_def_unlink PARAMS((struct df *, struct ref *));
201*c87b03e5Sespie static void df_use_unlink PARAMS((struct df *, struct ref *));
202*c87b03e5Sespie static void df_insn_refs_unlink PARAMS ((struct df *, basic_block, rtx));
203*c87b03e5Sespie #if 0
204*c87b03e5Sespie static void df_bb_refs_unlink PARAMS ((struct df *, basic_block));
205*c87b03e5Sespie static void df_refs_unlink PARAMS ((struct df *, bitmap));
206*c87b03e5Sespie #endif
207*c87b03e5Sespie
208*c87b03e5Sespie static struct ref *df_ref_create PARAMS((struct df *,
209*c87b03e5Sespie rtx, rtx *, rtx,
210*c87b03e5Sespie enum df_ref_type, enum df_ref_flags));
211*c87b03e5Sespie static void df_ref_record_1 PARAMS((struct df *, rtx, rtx *,
212*c87b03e5Sespie rtx, enum df_ref_type,
213*c87b03e5Sespie enum df_ref_flags));
214*c87b03e5Sespie static void df_ref_record PARAMS((struct df *, rtx, rtx *,
215*c87b03e5Sespie rtx, enum df_ref_type,
216*c87b03e5Sespie enum df_ref_flags));
217*c87b03e5Sespie static void df_def_record_1 PARAMS((struct df *, rtx, basic_block, rtx));
218*c87b03e5Sespie static void df_defs_record PARAMS((struct df *, rtx, basic_block, rtx));
219*c87b03e5Sespie static void df_uses_record PARAMS((struct df *, rtx *,
220*c87b03e5Sespie enum df_ref_type, basic_block, rtx,
221*c87b03e5Sespie enum df_ref_flags));
222*c87b03e5Sespie static void df_insn_refs_record PARAMS((struct df *, basic_block, rtx));
223*c87b03e5Sespie static void df_bb_refs_record PARAMS((struct df *, basic_block));
224*c87b03e5Sespie static void df_refs_record PARAMS((struct df *, bitmap));
225*c87b03e5Sespie
226*c87b03e5Sespie static void df_bb_reg_def_chain_create PARAMS((struct df *, basic_block));
227*c87b03e5Sespie static void df_reg_def_chain_create PARAMS((struct df *, bitmap));
228*c87b03e5Sespie static void df_bb_reg_use_chain_create PARAMS((struct df *, basic_block));
229*c87b03e5Sespie static void df_reg_use_chain_create PARAMS((struct df *, bitmap));
230*c87b03e5Sespie static void df_bb_du_chain_create PARAMS((struct df *, basic_block, bitmap));
231*c87b03e5Sespie static void df_du_chain_create PARAMS((struct df *, bitmap));
232*c87b03e5Sespie static void df_bb_ud_chain_create PARAMS((struct df *, basic_block));
233*c87b03e5Sespie static void df_ud_chain_create PARAMS((struct df *, bitmap));
234*c87b03e5Sespie static void df_bb_rd_local_compute PARAMS((struct df *, basic_block));
235*c87b03e5Sespie static void df_rd_local_compute PARAMS((struct df *, bitmap));
236*c87b03e5Sespie static void df_bb_ru_local_compute PARAMS((struct df *, basic_block));
237*c87b03e5Sespie static void df_ru_local_compute PARAMS((struct df *, bitmap));
238*c87b03e5Sespie static void df_bb_lr_local_compute PARAMS((struct df *, basic_block));
239*c87b03e5Sespie static void df_lr_local_compute PARAMS((struct df *, bitmap));
240*c87b03e5Sespie static void df_bb_reg_info_compute PARAMS((struct df *, basic_block, bitmap));
241*c87b03e5Sespie static void df_reg_info_compute PARAMS((struct df *, bitmap));
242*c87b03e5Sespie
243*c87b03e5Sespie static int df_bb_luids_set PARAMS((struct df *df, basic_block));
244*c87b03e5Sespie static int df_luids_set PARAMS((struct df *df, bitmap));
245*c87b03e5Sespie
246*c87b03e5Sespie static int df_modified_p PARAMS ((struct df *, bitmap));
247*c87b03e5Sespie static int df_refs_queue PARAMS ((struct df *));
248*c87b03e5Sespie static int df_refs_process PARAMS ((struct df *));
249*c87b03e5Sespie static int df_bb_refs_update PARAMS ((struct df *, basic_block));
250*c87b03e5Sespie static int df_refs_update PARAMS ((struct df *));
251*c87b03e5Sespie static void df_analyse_1 PARAMS((struct df *, bitmap, int, int));
252*c87b03e5Sespie
253*c87b03e5Sespie static void df_insns_modify PARAMS((struct df *, basic_block,
254*c87b03e5Sespie rtx, rtx));
255*c87b03e5Sespie static int df_rtx_mem_replace PARAMS ((rtx *, void *));
256*c87b03e5Sespie static int df_rtx_reg_replace PARAMS ((rtx *, void *));
257*c87b03e5Sespie void df_refs_reg_replace PARAMS ((struct df *, bitmap,
258*c87b03e5Sespie struct df_link *, rtx, rtx));
259*c87b03e5Sespie
260*c87b03e5Sespie static int df_def_dominates_all_uses_p PARAMS((struct df *, struct ref *def));
261*c87b03e5Sespie static int df_def_dominates_uses_p PARAMS((struct df *,
262*c87b03e5Sespie struct ref *def, bitmap));
263*c87b03e5Sespie static struct ref *df_bb_regno_last_use_find PARAMS((struct df *, basic_block,
264*c87b03e5Sespie unsigned int));
265*c87b03e5Sespie static struct ref *df_bb_regno_first_def_find PARAMS((struct df *, basic_block,
266*c87b03e5Sespie unsigned int));
267*c87b03e5Sespie static struct ref *df_bb_insn_regno_last_use_find PARAMS((struct df *,
268*c87b03e5Sespie basic_block,
269*c87b03e5Sespie rtx, unsigned int));
270*c87b03e5Sespie static struct ref *df_bb_insn_regno_first_def_find PARAMS((struct df *,
271*c87b03e5Sespie basic_block,
272*c87b03e5Sespie rtx, unsigned int));
273*c87b03e5Sespie
274*c87b03e5Sespie static void df_chain_dump PARAMS((struct df_link *, FILE *file));
275*c87b03e5Sespie static void df_chain_dump_regno PARAMS((struct df_link *, FILE *file));
276*c87b03e5Sespie static void df_regno_debug PARAMS ((struct df *, unsigned int, FILE *));
277*c87b03e5Sespie static void df_ref_debug PARAMS ((struct df *, struct ref *, FILE *));
278*c87b03e5Sespie static void df_rd_transfer_function PARAMS ((int, int *, bitmap, bitmap,
279*c87b03e5Sespie bitmap, bitmap, void *));
280*c87b03e5Sespie static void df_ru_transfer_function PARAMS ((int, int *, bitmap, bitmap,
281*c87b03e5Sespie bitmap, bitmap, void *));
282*c87b03e5Sespie static void df_lr_transfer_function PARAMS ((int, int *, bitmap, bitmap,
283*c87b03e5Sespie bitmap, bitmap, void *));
284*c87b03e5Sespie static void hybrid_search_bitmap PARAMS ((basic_block, bitmap *, bitmap *,
285*c87b03e5Sespie bitmap *, bitmap *, enum df_flow_dir,
286*c87b03e5Sespie enum df_confluence_op,
287*c87b03e5Sespie transfer_function_bitmap,
288*c87b03e5Sespie sbitmap, sbitmap, void *));
289*c87b03e5Sespie static void hybrid_search_sbitmap PARAMS ((basic_block, sbitmap *, sbitmap *,
290*c87b03e5Sespie sbitmap *, sbitmap *, enum df_flow_dir,
291*c87b03e5Sespie enum df_confluence_op,
292*c87b03e5Sespie transfer_function_sbitmap,
293*c87b03e5Sespie sbitmap, sbitmap, void *));
294*c87b03e5Sespie static inline bool read_modify_subreg_p PARAMS ((rtx));
295*c87b03e5Sespie
296*c87b03e5Sespie
297*c87b03e5Sespie /* Local memory allocation/deallocation routines. */
298*c87b03e5Sespie
299*c87b03e5Sespie
300*c87b03e5Sespie /* Increase the insn info table to have space for at least SIZE + 1
301*c87b03e5Sespie elements. */
302*c87b03e5Sespie static void
df_insn_table_realloc(df,size)303*c87b03e5Sespie df_insn_table_realloc (df, size)
304*c87b03e5Sespie struct df *df;
305*c87b03e5Sespie unsigned int size;
306*c87b03e5Sespie {
307*c87b03e5Sespie size++;
308*c87b03e5Sespie if (size <= df->insn_size)
309*c87b03e5Sespie return;
310*c87b03e5Sespie
311*c87b03e5Sespie /* Make the table a little larger than requested, so we don't need
312*c87b03e5Sespie to enlarge it so often. */
313*c87b03e5Sespie size += df->insn_size / 4;
314*c87b03e5Sespie
315*c87b03e5Sespie df->insns = (struct insn_info *)
316*c87b03e5Sespie xrealloc (df->insns, size * sizeof (struct insn_info));
317*c87b03e5Sespie
318*c87b03e5Sespie memset (df->insns + df->insn_size, 0,
319*c87b03e5Sespie (size - df->insn_size) * sizeof (struct insn_info));
320*c87b03e5Sespie
321*c87b03e5Sespie df->insn_size = size;
322*c87b03e5Sespie
323*c87b03e5Sespie if (! df->insns_modified)
324*c87b03e5Sespie {
325*c87b03e5Sespie df->insns_modified = BITMAP_XMALLOC ();
326*c87b03e5Sespie bitmap_zero (df->insns_modified);
327*c87b03e5Sespie }
328*c87b03e5Sespie }
329*c87b03e5Sespie
330*c87b03e5Sespie
331*c87b03e5Sespie /* Increase the reg info table by SIZE more elements. */
332*c87b03e5Sespie static void
df_reg_table_realloc(df,size)333*c87b03e5Sespie df_reg_table_realloc (df, size)
334*c87b03e5Sespie struct df *df;
335*c87b03e5Sespie int size;
336*c87b03e5Sespie {
337*c87b03e5Sespie /* Make table 25 percent larger by default. */
338*c87b03e5Sespie if (! size)
339*c87b03e5Sespie size = df->reg_size / 4;
340*c87b03e5Sespie
341*c87b03e5Sespie size += df->reg_size;
342*c87b03e5Sespie if (size < max_reg_num ())
343*c87b03e5Sespie size = max_reg_num ();
344*c87b03e5Sespie
345*c87b03e5Sespie df->regs = (struct reg_info *)
346*c87b03e5Sespie xrealloc (df->regs, size * sizeof (struct reg_info));
347*c87b03e5Sespie
348*c87b03e5Sespie /* Zero the new entries. */
349*c87b03e5Sespie memset (df->regs + df->reg_size, 0,
350*c87b03e5Sespie (size - df->reg_size) * sizeof (struct reg_info));
351*c87b03e5Sespie
352*c87b03e5Sespie df->reg_size = size;
353*c87b03e5Sespie }
354*c87b03e5Sespie
355*c87b03e5Sespie
356*c87b03e5Sespie #if 0
357*c87b03e5Sespie /* Not currently used. */
358*c87b03e5Sespie static void
359*c87b03e5Sespie df_def_table_realloc (df, size)
360*c87b03e5Sespie struct df *df;
361*c87b03e5Sespie int size;
362*c87b03e5Sespie {
363*c87b03e5Sespie int i;
364*c87b03e5Sespie struct ref *refs;
365*c87b03e5Sespie
366*c87b03e5Sespie /* Make table 25 percent larger by default. */
367*c87b03e5Sespie if (! size)
368*c87b03e5Sespie size = df->def_size / 4;
369*c87b03e5Sespie
370*c87b03e5Sespie df->def_size += size;
371*c87b03e5Sespie df->defs = xrealloc (df->defs,
372*c87b03e5Sespie df->def_size * sizeof (*df->defs));
373*c87b03e5Sespie
374*c87b03e5Sespie /* Allocate a new block of memory and link into list of blocks
375*c87b03e5Sespie that will need to be freed later. */
376*c87b03e5Sespie
377*c87b03e5Sespie refs = xmalloc (size * sizeof (*refs));
378*c87b03e5Sespie
379*c87b03e5Sespie /* Link all the new refs together, overloading the chain field. */
380*c87b03e5Sespie for (i = 0; i < size - 1; i++)
381*c87b03e5Sespie refs[i].chain = (struct df_link *) (refs + i + 1);
382*c87b03e5Sespie refs[size - 1].chain = 0;
383*c87b03e5Sespie }
384*c87b03e5Sespie #endif
385*c87b03e5Sespie
386*c87b03e5Sespie
387*c87b03e5Sespie
388*c87b03e5Sespie /* Allocate bitmaps for each basic block. */
389*c87b03e5Sespie static void
df_bitmaps_alloc(df,flags)390*c87b03e5Sespie df_bitmaps_alloc (df, flags)
391*c87b03e5Sespie struct df *df;
392*c87b03e5Sespie int flags;
393*c87b03e5Sespie {
394*c87b03e5Sespie int dflags = 0;
395*c87b03e5Sespie basic_block bb;
396*c87b03e5Sespie
397*c87b03e5Sespie /* Free the bitmaps if they need resizing. */
398*c87b03e5Sespie if ((flags & DF_LR) && df->n_regs < (unsigned int) max_reg_num ())
399*c87b03e5Sespie dflags |= DF_LR | DF_RU;
400*c87b03e5Sespie if ((flags & DF_RU) && df->n_uses < df->use_id)
401*c87b03e5Sespie dflags |= DF_RU;
402*c87b03e5Sespie if ((flags & DF_RD) && df->n_defs < df->def_id)
403*c87b03e5Sespie dflags |= DF_RD;
404*c87b03e5Sespie
405*c87b03e5Sespie if (dflags)
406*c87b03e5Sespie df_bitmaps_free (df, dflags);
407*c87b03e5Sespie
408*c87b03e5Sespie df->n_defs = df->def_id;
409*c87b03e5Sespie df->n_uses = df->use_id;
410*c87b03e5Sespie
411*c87b03e5Sespie FOR_EACH_BB (bb)
412*c87b03e5Sespie {
413*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
414*c87b03e5Sespie
415*c87b03e5Sespie if (flags & DF_RD && ! bb_info->rd_in)
416*c87b03e5Sespie {
417*c87b03e5Sespie /* Allocate bitmaps for reaching definitions. */
418*c87b03e5Sespie bb_info->rd_kill = BITMAP_XMALLOC ();
419*c87b03e5Sespie bitmap_zero (bb_info->rd_kill);
420*c87b03e5Sespie bb_info->rd_gen = BITMAP_XMALLOC ();
421*c87b03e5Sespie bitmap_zero (bb_info->rd_gen);
422*c87b03e5Sespie bb_info->rd_in = BITMAP_XMALLOC ();
423*c87b03e5Sespie bb_info->rd_out = BITMAP_XMALLOC ();
424*c87b03e5Sespie bb_info->rd_valid = 0;
425*c87b03e5Sespie }
426*c87b03e5Sespie
427*c87b03e5Sespie if (flags & DF_RU && ! bb_info->ru_in)
428*c87b03e5Sespie {
429*c87b03e5Sespie /* Allocate bitmaps for upward exposed uses. */
430*c87b03e5Sespie bb_info->ru_kill = BITMAP_XMALLOC ();
431*c87b03e5Sespie bitmap_zero (bb_info->ru_kill);
432*c87b03e5Sespie /* Note the lack of symmetry. */
433*c87b03e5Sespie bb_info->ru_gen = BITMAP_XMALLOC ();
434*c87b03e5Sespie bitmap_zero (bb_info->ru_gen);
435*c87b03e5Sespie bb_info->ru_in = BITMAP_XMALLOC ();
436*c87b03e5Sespie bb_info->ru_out = BITMAP_XMALLOC ();
437*c87b03e5Sespie bb_info->ru_valid = 0;
438*c87b03e5Sespie }
439*c87b03e5Sespie
440*c87b03e5Sespie if (flags & DF_LR && ! bb_info->lr_in)
441*c87b03e5Sespie {
442*c87b03e5Sespie /* Allocate bitmaps for live variables. */
443*c87b03e5Sespie bb_info->lr_def = BITMAP_XMALLOC ();
444*c87b03e5Sespie bitmap_zero (bb_info->lr_def);
445*c87b03e5Sespie bb_info->lr_use = BITMAP_XMALLOC ();
446*c87b03e5Sespie bitmap_zero (bb_info->lr_use);
447*c87b03e5Sespie bb_info->lr_in = BITMAP_XMALLOC ();
448*c87b03e5Sespie bb_info->lr_out = BITMAP_XMALLOC ();
449*c87b03e5Sespie bb_info->lr_valid = 0;
450*c87b03e5Sespie }
451*c87b03e5Sespie }
452*c87b03e5Sespie }
453*c87b03e5Sespie
454*c87b03e5Sespie
455*c87b03e5Sespie /* Free bitmaps for each basic block. */
456*c87b03e5Sespie static void
df_bitmaps_free(df,flags)457*c87b03e5Sespie df_bitmaps_free (df, flags)
458*c87b03e5Sespie struct df *df ATTRIBUTE_UNUSED;
459*c87b03e5Sespie int flags;
460*c87b03e5Sespie {
461*c87b03e5Sespie basic_block bb;
462*c87b03e5Sespie
463*c87b03e5Sespie FOR_EACH_BB (bb)
464*c87b03e5Sespie {
465*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
466*c87b03e5Sespie
467*c87b03e5Sespie if (!bb_info)
468*c87b03e5Sespie continue;
469*c87b03e5Sespie
470*c87b03e5Sespie if ((flags & DF_RD) && bb_info->rd_in)
471*c87b03e5Sespie {
472*c87b03e5Sespie /* Free bitmaps for reaching definitions. */
473*c87b03e5Sespie BITMAP_XFREE (bb_info->rd_kill);
474*c87b03e5Sespie bb_info->rd_kill = NULL;
475*c87b03e5Sespie BITMAP_XFREE (bb_info->rd_gen);
476*c87b03e5Sespie bb_info->rd_gen = NULL;
477*c87b03e5Sespie BITMAP_XFREE (bb_info->rd_in);
478*c87b03e5Sespie bb_info->rd_in = NULL;
479*c87b03e5Sespie BITMAP_XFREE (bb_info->rd_out);
480*c87b03e5Sespie bb_info->rd_out = NULL;
481*c87b03e5Sespie }
482*c87b03e5Sespie
483*c87b03e5Sespie if ((flags & DF_RU) && bb_info->ru_in)
484*c87b03e5Sespie {
485*c87b03e5Sespie /* Free bitmaps for upward exposed uses. */
486*c87b03e5Sespie BITMAP_XFREE (bb_info->ru_kill);
487*c87b03e5Sespie bb_info->ru_kill = NULL;
488*c87b03e5Sespie BITMAP_XFREE (bb_info->ru_gen);
489*c87b03e5Sespie bb_info->ru_gen = NULL;
490*c87b03e5Sespie BITMAP_XFREE (bb_info->ru_in);
491*c87b03e5Sespie bb_info->ru_in = NULL;
492*c87b03e5Sespie BITMAP_XFREE (bb_info->ru_out);
493*c87b03e5Sespie bb_info->ru_out = NULL;
494*c87b03e5Sespie }
495*c87b03e5Sespie
496*c87b03e5Sespie if ((flags & DF_LR) && bb_info->lr_in)
497*c87b03e5Sespie {
498*c87b03e5Sespie /* Free bitmaps for live variables. */
499*c87b03e5Sespie BITMAP_XFREE (bb_info->lr_def);
500*c87b03e5Sespie bb_info->lr_def = NULL;
501*c87b03e5Sespie BITMAP_XFREE (bb_info->lr_use);
502*c87b03e5Sespie bb_info->lr_use = NULL;
503*c87b03e5Sespie BITMAP_XFREE (bb_info->lr_in);
504*c87b03e5Sespie bb_info->lr_in = NULL;
505*c87b03e5Sespie BITMAP_XFREE (bb_info->lr_out);
506*c87b03e5Sespie bb_info->lr_out = NULL;
507*c87b03e5Sespie }
508*c87b03e5Sespie }
509*c87b03e5Sespie df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
510*c87b03e5Sespie }
511*c87b03e5Sespie
512*c87b03e5Sespie
513*c87b03e5Sespie /* Allocate and initialize dataflow memory. */
514*c87b03e5Sespie static void
df_alloc(df,n_regs)515*c87b03e5Sespie df_alloc (df, n_regs)
516*c87b03e5Sespie struct df *df;
517*c87b03e5Sespie int n_regs;
518*c87b03e5Sespie {
519*c87b03e5Sespie int n_insns;
520*c87b03e5Sespie basic_block bb;
521*c87b03e5Sespie
522*c87b03e5Sespie gcc_obstack_init (&df_ref_obstack);
523*c87b03e5Sespie
524*c87b03e5Sespie /* Perhaps we should use LUIDs to save memory for the insn_refs
525*c87b03e5Sespie table. This is only a small saving; a few pointers. */
526*c87b03e5Sespie n_insns = get_max_uid () + 1;
527*c87b03e5Sespie
528*c87b03e5Sespie df->def_id = 0;
529*c87b03e5Sespie df->n_defs = 0;
530*c87b03e5Sespie /* Approximate number of defs by number of insns. */
531*c87b03e5Sespie df->def_size = n_insns;
532*c87b03e5Sespie df->defs = xmalloc (df->def_size * sizeof (*df->defs));
533*c87b03e5Sespie
534*c87b03e5Sespie df->use_id = 0;
535*c87b03e5Sespie df->n_uses = 0;
536*c87b03e5Sespie /* Approximate number of uses by twice number of insns. */
537*c87b03e5Sespie df->use_size = n_insns * 2;
538*c87b03e5Sespie df->uses = xmalloc (df->use_size * sizeof (*df->uses));
539*c87b03e5Sespie
540*c87b03e5Sespie df->n_regs = n_regs;
541*c87b03e5Sespie df->n_bbs = last_basic_block;
542*c87b03e5Sespie
543*c87b03e5Sespie /* Allocate temporary working array used during local dataflow analysis. */
544*c87b03e5Sespie df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));
545*c87b03e5Sespie
546*c87b03e5Sespie df_insn_table_realloc (df, n_insns);
547*c87b03e5Sespie
548*c87b03e5Sespie df_reg_table_realloc (df, df->n_regs);
549*c87b03e5Sespie
550*c87b03e5Sespie df->bbs_modified = BITMAP_XMALLOC ();
551*c87b03e5Sespie bitmap_zero (df->bbs_modified);
552*c87b03e5Sespie
553*c87b03e5Sespie df->flags = 0;
554*c87b03e5Sespie
555*c87b03e5Sespie df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));
556*c87b03e5Sespie
557*c87b03e5Sespie df->all_blocks = BITMAP_XMALLOC ();
558*c87b03e5Sespie FOR_EACH_BB (bb)
559*c87b03e5Sespie bitmap_set_bit (df->all_blocks, bb->index);
560*c87b03e5Sespie }
561*c87b03e5Sespie
562*c87b03e5Sespie
563*c87b03e5Sespie /* Free all the dataflow info. */
564*c87b03e5Sespie static void
df_free(df)565*c87b03e5Sespie df_free (df)
566*c87b03e5Sespie struct df *df;
567*c87b03e5Sespie {
568*c87b03e5Sespie df_bitmaps_free (df, DF_ALL);
569*c87b03e5Sespie
570*c87b03e5Sespie if (df->bbs)
571*c87b03e5Sespie free (df->bbs);
572*c87b03e5Sespie df->bbs = 0;
573*c87b03e5Sespie
574*c87b03e5Sespie if (df->insns)
575*c87b03e5Sespie free (df->insns);
576*c87b03e5Sespie df->insns = 0;
577*c87b03e5Sespie df->insn_size = 0;
578*c87b03e5Sespie
579*c87b03e5Sespie if (df->defs)
580*c87b03e5Sespie free (df->defs);
581*c87b03e5Sespie df->defs = 0;
582*c87b03e5Sespie df->def_size = 0;
583*c87b03e5Sespie df->def_id = 0;
584*c87b03e5Sespie
585*c87b03e5Sespie if (df->uses)
586*c87b03e5Sespie free (df->uses);
587*c87b03e5Sespie df->uses = 0;
588*c87b03e5Sespie df->use_size = 0;
589*c87b03e5Sespie df->use_id = 0;
590*c87b03e5Sespie
591*c87b03e5Sespie if (df->regs)
592*c87b03e5Sespie free (df->regs);
593*c87b03e5Sespie df->regs = 0;
594*c87b03e5Sespie df->reg_size = 0;
595*c87b03e5Sespie
596*c87b03e5Sespie if (df->bbs_modified)
597*c87b03e5Sespie BITMAP_XFREE (df->bbs_modified);
598*c87b03e5Sespie df->bbs_modified = 0;
599*c87b03e5Sespie
600*c87b03e5Sespie if (df->insns_modified)
601*c87b03e5Sespie BITMAP_XFREE (df->insns_modified);
602*c87b03e5Sespie df->insns_modified = 0;
603*c87b03e5Sespie
604*c87b03e5Sespie BITMAP_XFREE (df->all_blocks);
605*c87b03e5Sespie df->all_blocks = 0;
606*c87b03e5Sespie
607*c87b03e5Sespie obstack_free (&df_ref_obstack, NULL);
608*c87b03e5Sespie }
609*c87b03e5Sespie
610*c87b03e5Sespie /* Local miscellaneous routines. */
611*c87b03e5Sespie
612*c87b03e5Sespie /* Return a USE for register REGNO. */
df_reg_use_gen(regno)613*c87b03e5Sespie static rtx df_reg_use_gen (regno)
614*c87b03e5Sespie unsigned int regno;
615*c87b03e5Sespie {
616*c87b03e5Sespie rtx reg;
617*c87b03e5Sespie rtx use;
618*c87b03e5Sespie
619*c87b03e5Sespie reg = regno_reg_rtx[regno];
620*c87b03e5Sespie
621*c87b03e5Sespie use = gen_rtx_USE (GET_MODE (reg), reg);
622*c87b03e5Sespie return use;
623*c87b03e5Sespie }
624*c87b03e5Sespie
625*c87b03e5Sespie
626*c87b03e5Sespie /* Return a CLOBBER for register REGNO. */
df_reg_clobber_gen(regno)627*c87b03e5Sespie static rtx df_reg_clobber_gen (regno)
628*c87b03e5Sespie unsigned int regno;
629*c87b03e5Sespie {
630*c87b03e5Sespie rtx reg;
631*c87b03e5Sespie rtx use;
632*c87b03e5Sespie
633*c87b03e5Sespie reg = regno_reg_rtx[regno];
634*c87b03e5Sespie
635*c87b03e5Sespie use = gen_rtx_CLOBBER (GET_MODE (reg), reg);
636*c87b03e5Sespie return use;
637*c87b03e5Sespie }
638*c87b03e5Sespie
639*c87b03e5Sespie /* Local chain manipulation routines. */
640*c87b03e5Sespie
641*c87b03e5Sespie /* Create a link in a def-use or use-def chain. */
642*c87b03e5Sespie static inline struct df_link *
df_link_create(ref,next)643*c87b03e5Sespie df_link_create (ref, next)
644*c87b03e5Sespie struct ref *ref;
645*c87b03e5Sespie struct df_link *next;
646*c87b03e5Sespie {
647*c87b03e5Sespie struct df_link *link;
648*c87b03e5Sespie
649*c87b03e5Sespie link = (struct df_link *) obstack_alloc (&df_ref_obstack,
650*c87b03e5Sespie sizeof (*link));
651*c87b03e5Sespie link->next = next;
652*c87b03e5Sespie link->ref = ref;
653*c87b03e5Sespie return link;
654*c87b03e5Sespie }
655*c87b03e5Sespie
656*c87b03e5Sespie
657*c87b03e5Sespie /* Add REF to chain head pointed to by PHEAD. */
658*c87b03e5Sespie static struct df_link *
df_ref_unlink(phead,ref)659*c87b03e5Sespie df_ref_unlink (phead, ref)
660*c87b03e5Sespie struct df_link **phead;
661*c87b03e5Sespie struct ref *ref;
662*c87b03e5Sespie {
663*c87b03e5Sespie struct df_link *link = *phead;
664*c87b03e5Sespie
665*c87b03e5Sespie if (link)
666*c87b03e5Sespie {
667*c87b03e5Sespie if (! link->next)
668*c87b03e5Sespie {
669*c87b03e5Sespie /* Only a single ref. It must be the one we want.
670*c87b03e5Sespie If not, the def-use and use-def chains are likely to
671*c87b03e5Sespie be inconsistent. */
672*c87b03e5Sespie if (link->ref != ref)
673*c87b03e5Sespie abort ();
674*c87b03e5Sespie /* Now have an empty chain. */
675*c87b03e5Sespie *phead = NULL;
676*c87b03e5Sespie }
677*c87b03e5Sespie else
678*c87b03e5Sespie {
679*c87b03e5Sespie /* Multiple refs. One of them must be us. */
680*c87b03e5Sespie if (link->ref == ref)
681*c87b03e5Sespie *phead = link->next;
682*c87b03e5Sespie else
683*c87b03e5Sespie {
684*c87b03e5Sespie /* Follow chain. */
685*c87b03e5Sespie for (; link->next; link = link->next)
686*c87b03e5Sespie {
687*c87b03e5Sespie if (link->next->ref == ref)
688*c87b03e5Sespie {
689*c87b03e5Sespie /* Unlink from list. */
690*c87b03e5Sespie link->next = link->next->next;
691*c87b03e5Sespie return link->next;
692*c87b03e5Sespie }
693*c87b03e5Sespie }
694*c87b03e5Sespie }
695*c87b03e5Sespie }
696*c87b03e5Sespie }
697*c87b03e5Sespie return link;
698*c87b03e5Sespie }
699*c87b03e5Sespie
700*c87b03e5Sespie
701*c87b03e5Sespie /* Unlink REF from all def-use/use-def chains, etc. */
702*c87b03e5Sespie int
df_ref_remove(df,ref)703*c87b03e5Sespie df_ref_remove (df, ref)
704*c87b03e5Sespie struct df *df;
705*c87b03e5Sespie struct ref *ref;
706*c87b03e5Sespie {
707*c87b03e5Sespie if (DF_REF_REG_DEF_P (ref))
708*c87b03e5Sespie {
709*c87b03e5Sespie df_def_unlink (df, ref);
710*c87b03e5Sespie df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
711*c87b03e5Sespie }
712*c87b03e5Sespie else
713*c87b03e5Sespie {
714*c87b03e5Sespie df_use_unlink (df, ref);
715*c87b03e5Sespie df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
716*c87b03e5Sespie }
717*c87b03e5Sespie return 1;
718*c87b03e5Sespie }
719*c87b03e5Sespie
720*c87b03e5Sespie
721*c87b03e5Sespie /* Unlink DEF from use-def and reg-def chains. */
722*c87b03e5Sespie static void
df_def_unlink(df,def)723*c87b03e5Sespie df_def_unlink (df, def)
724*c87b03e5Sespie struct df *df ATTRIBUTE_UNUSED;
725*c87b03e5Sespie struct ref *def;
726*c87b03e5Sespie {
727*c87b03e5Sespie struct df_link *du_link;
728*c87b03e5Sespie unsigned int dregno = DF_REF_REGNO (def);
729*c87b03e5Sespie
730*c87b03e5Sespie /* Follow def-use chain to find all the uses of this def. */
731*c87b03e5Sespie for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
732*c87b03e5Sespie {
733*c87b03e5Sespie struct ref *use = du_link->ref;
734*c87b03e5Sespie
735*c87b03e5Sespie /* Unlink this def from the use-def chain. */
736*c87b03e5Sespie df_ref_unlink (&DF_REF_CHAIN (use), def);
737*c87b03e5Sespie }
738*c87b03e5Sespie DF_REF_CHAIN (def) = 0;
739*c87b03e5Sespie
740*c87b03e5Sespie /* Unlink def from reg-def chain. */
741*c87b03e5Sespie df_ref_unlink (&df->regs[dregno].defs, def);
742*c87b03e5Sespie
743*c87b03e5Sespie df->defs[DF_REF_ID (def)] = 0;
744*c87b03e5Sespie }
745*c87b03e5Sespie
746*c87b03e5Sespie
747*c87b03e5Sespie /* Unlink use from def-use and reg-use chains. */
748*c87b03e5Sespie static void
df_use_unlink(df,use)749*c87b03e5Sespie df_use_unlink (df, use)
750*c87b03e5Sespie struct df *df ATTRIBUTE_UNUSED;
751*c87b03e5Sespie struct ref *use;
752*c87b03e5Sespie {
753*c87b03e5Sespie struct df_link *ud_link;
754*c87b03e5Sespie unsigned int uregno = DF_REF_REGNO (use);
755*c87b03e5Sespie
756*c87b03e5Sespie /* Follow use-def chain to find all the defs of this use. */
757*c87b03e5Sespie for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
758*c87b03e5Sespie {
759*c87b03e5Sespie struct ref *def = ud_link->ref;
760*c87b03e5Sespie
761*c87b03e5Sespie /* Unlink this use from the def-use chain. */
762*c87b03e5Sespie df_ref_unlink (&DF_REF_CHAIN (def), use);
763*c87b03e5Sespie }
764*c87b03e5Sespie DF_REF_CHAIN (use) = 0;
765*c87b03e5Sespie
766*c87b03e5Sespie /* Unlink use from reg-use chain. */
767*c87b03e5Sespie df_ref_unlink (&df->regs[uregno].uses, use);
768*c87b03e5Sespie
769*c87b03e5Sespie df->uses[DF_REF_ID (use)] = 0;
770*c87b03e5Sespie }
771*c87b03e5Sespie
772*c87b03e5Sespie /* Local routines for recording refs. */
773*c87b03e5Sespie
774*c87b03e5Sespie
775*c87b03e5Sespie /* Create a new ref of type DF_REF_TYPE for register REG at address
776*c87b03e5Sespie LOC within INSN of BB. */
777*c87b03e5Sespie static struct ref *
df_ref_create(df,reg,loc,insn,ref_type,ref_flags)778*c87b03e5Sespie df_ref_create (df, reg, loc, insn, ref_type, ref_flags)
779*c87b03e5Sespie struct df *df;
780*c87b03e5Sespie rtx reg;
781*c87b03e5Sespie rtx *loc;
782*c87b03e5Sespie rtx insn;
783*c87b03e5Sespie enum df_ref_type ref_type;
784*c87b03e5Sespie enum df_ref_flags ref_flags;
785*c87b03e5Sespie {
786*c87b03e5Sespie struct ref *this_ref;
787*c87b03e5Sespie unsigned int uid;
788*c87b03e5Sespie
789*c87b03e5Sespie this_ref = (struct ref *) obstack_alloc (&df_ref_obstack,
790*c87b03e5Sespie sizeof (*this_ref));
791*c87b03e5Sespie DF_REF_REG (this_ref) = reg;
792*c87b03e5Sespie DF_REF_LOC (this_ref) = loc;
793*c87b03e5Sespie DF_REF_INSN (this_ref) = insn;
794*c87b03e5Sespie DF_REF_CHAIN (this_ref) = 0;
795*c87b03e5Sespie DF_REF_TYPE (this_ref) = ref_type;
796*c87b03e5Sespie DF_REF_FLAGS (this_ref) = ref_flags;
797*c87b03e5Sespie uid = INSN_UID (insn);
798*c87b03e5Sespie
799*c87b03e5Sespie if (ref_type == DF_REF_REG_DEF)
800*c87b03e5Sespie {
801*c87b03e5Sespie if (df->def_id >= df->def_size)
802*c87b03e5Sespie {
803*c87b03e5Sespie /* Make table 25 percent larger. */
804*c87b03e5Sespie df->def_size += (df->def_size / 4);
805*c87b03e5Sespie df->defs = xrealloc (df->defs,
806*c87b03e5Sespie df->def_size * sizeof (*df->defs));
807*c87b03e5Sespie }
808*c87b03e5Sespie DF_REF_ID (this_ref) = df->def_id;
809*c87b03e5Sespie df->defs[df->def_id++] = this_ref;
810*c87b03e5Sespie }
811*c87b03e5Sespie else
812*c87b03e5Sespie {
813*c87b03e5Sespie if (df->use_id >= df->use_size)
814*c87b03e5Sespie {
815*c87b03e5Sespie /* Make table 25 percent larger. */
816*c87b03e5Sespie df->use_size += (df->use_size / 4);
817*c87b03e5Sespie df->uses = xrealloc (df->uses,
818*c87b03e5Sespie df->use_size * sizeof (*df->uses));
819*c87b03e5Sespie }
820*c87b03e5Sespie DF_REF_ID (this_ref) = df->use_id;
821*c87b03e5Sespie df->uses[df->use_id++] = this_ref;
822*c87b03e5Sespie }
823*c87b03e5Sespie return this_ref;
824*c87b03e5Sespie }
825*c87b03e5Sespie
826*c87b03e5Sespie
827*c87b03e5Sespie /* Create a new reference of type DF_REF_TYPE for a single register REG,
828*c87b03e5Sespie used inside the LOC rtx of INSN. */
829*c87b03e5Sespie static void
df_ref_record_1(df,reg,loc,insn,ref_type,ref_flags)830*c87b03e5Sespie df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags)
831*c87b03e5Sespie struct df *df;
832*c87b03e5Sespie rtx reg;
833*c87b03e5Sespie rtx *loc;
834*c87b03e5Sespie rtx insn;
835*c87b03e5Sespie enum df_ref_type ref_type;
836*c87b03e5Sespie enum df_ref_flags ref_flags;
837*c87b03e5Sespie {
838*c87b03e5Sespie df_ref_create (df, reg, loc, insn, ref_type, ref_flags);
839*c87b03e5Sespie }
840*c87b03e5Sespie
841*c87b03e5Sespie
842*c87b03e5Sespie /* Create new references of type DF_REF_TYPE for each part of register REG
843*c87b03e5Sespie at address LOC within INSN of BB. */
844*c87b03e5Sespie static void
df_ref_record(df,reg,loc,insn,ref_type,ref_flags)845*c87b03e5Sespie df_ref_record (df, reg, loc, insn, ref_type, ref_flags)
846*c87b03e5Sespie struct df *df;
847*c87b03e5Sespie rtx reg;
848*c87b03e5Sespie rtx *loc;
849*c87b03e5Sespie rtx insn;
850*c87b03e5Sespie enum df_ref_type ref_type;
851*c87b03e5Sespie enum df_ref_flags ref_flags;
852*c87b03e5Sespie {
853*c87b03e5Sespie unsigned int regno;
854*c87b03e5Sespie
855*c87b03e5Sespie if (GET_CODE (reg) != REG && GET_CODE (reg) != SUBREG)
856*c87b03e5Sespie abort ();
857*c87b03e5Sespie
858*c87b03e5Sespie /* For the reg allocator we are interested in some SUBREG rtx's, but not
859*c87b03e5Sespie all. Notably only those representing a word extraction from a multi-word
860*c87b03e5Sespie reg. As written in the docu those should have the form
861*c87b03e5Sespie (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
862*c87b03e5Sespie XXX Is that true? We could also use the global word_mode variable. */
863*c87b03e5Sespie if (GET_CODE (reg) == SUBREG
864*c87b03e5Sespie && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
865*c87b03e5Sespie || GET_MODE_SIZE (GET_MODE (reg))
866*c87b03e5Sespie >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
867*c87b03e5Sespie {
868*c87b03e5Sespie loc = &SUBREG_REG (reg);
869*c87b03e5Sespie reg = *loc;
870*c87b03e5Sespie }
871*c87b03e5Sespie
872*c87b03e5Sespie regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
873*c87b03e5Sespie if (regno < FIRST_PSEUDO_REGISTER)
874*c87b03e5Sespie {
875*c87b03e5Sespie int i;
876*c87b03e5Sespie int endregno;
877*c87b03e5Sespie
878*c87b03e5Sespie if (! (df->flags & DF_HARD_REGS))
879*c87b03e5Sespie return;
880*c87b03e5Sespie
881*c87b03e5Sespie /* GET_MODE (reg) is correct here. We don't want to go into a SUBREG
882*c87b03e5Sespie for the mode, because we only want to add references to regs, which
883*c87b03e5Sespie are really referenced. E.g. a (subreg:SI (reg:DI 0) 0) does _not_
884*c87b03e5Sespie reference the whole reg 0 in DI mode (which would also include
885*c87b03e5Sespie reg 1, at least, if 0 and 1 are SImode registers). */
886*c87b03e5Sespie endregno = HARD_REGNO_NREGS (regno, GET_MODE (reg));
887*c87b03e5Sespie if (GET_CODE (reg) == SUBREG)
888*c87b03e5Sespie regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
889*c87b03e5Sespie SUBREG_BYTE (reg), GET_MODE (reg));
890*c87b03e5Sespie endregno += regno;
891*c87b03e5Sespie
892*c87b03e5Sespie for (i = regno; i < endregno; i++)
893*c87b03e5Sespie df_ref_record_1 (df, regno_reg_rtx[i],
894*c87b03e5Sespie loc, insn, ref_type, ref_flags);
895*c87b03e5Sespie }
896*c87b03e5Sespie else
897*c87b03e5Sespie {
898*c87b03e5Sespie df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags);
899*c87b03e5Sespie }
900*c87b03e5Sespie }
901*c87b03e5Sespie
902*c87b03e5Sespie /* Writes to paradoxical subregs, or subregs which are too narrow
903*c87b03e5Sespie are read-modify-write. */
904*c87b03e5Sespie
905*c87b03e5Sespie static inline bool
read_modify_subreg_p(x)906*c87b03e5Sespie read_modify_subreg_p (x)
907*c87b03e5Sespie rtx x;
908*c87b03e5Sespie {
909*c87b03e5Sespie unsigned int isize, osize;
910*c87b03e5Sespie if (GET_CODE (x) != SUBREG)
911*c87b03e5Sespie return false;
912*c87b03e5Sespie isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
913*c87b03e5Sespie osize = GET_MODE_SIZE (GET_MODE (x));
914*c87b03e5Sespie if (isize <= osize)
915*c87b03e5Sespie return true;
916*c87b03e5Sespie if (isize <= UNITS_PER_WORD)
917*c87b03e5Sespie return false;
918*c87b03e5Sespie if (osize >= UNITS_PER_WORD)
919*c87b03e5Sespie return false;
920*c87b03e5Sespie return true;
921*c87b03e5Sespie }
922*c87b03e5Sespie
923*c87b03e5Sespie /* Process all the registers defined in the rtx, X. */
924*c87b03e5Sespie static void
df_def_record_1(df,x,bb,insn)925*c87b03e5Sespie df_def_record_1 (df, x, bb, insn)
926*c87b03e5Sespie struct df *df;
927*c87b03e5Sespie rtx x;
928*c87b03e5Sespie basic_block bb;
929*c87b03e5Sespie rtx insn;
930*c87b03e5Sespie {
931*c87b03e5Sespie rtx *loc = &SET_DEST (x);
932*c87b03e5Sespie rtx dst = *loc;
933*c87b03e5Sespie enum df_ref_flags flags = 0;
934*c87b03e5Sespie
935*c87b03e5Sespie /* Some targets place small structures in registers for
936*c87b03e5Sespie return values of functions. */
937*c87b03e5Sespie if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
938*c87b03e5Sespie {
939*c87b03e5Sespie int i;
940*c87b03e5Sespie
941*c87b03e5Sespie for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
942*c87b03e5Sespie df_def_record_1 (df, XVECEXP (dst, 0, i), bb, insn);
943*c87b03e5Sespie return;
944*c87b03e5Sespie }
945*c87b03e5Sespie
946*c87b03e5Sespie #ifdef CLASS_CANNOT_CHANGE_MODE
947*c87b03e5Sespie if (GET_CODE (dst) == SUBREG
948*c87b03e5Sespie && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (dst)),
949*c87b03e5Sespie GET_MODE (dst)))
950*c87b03e5Sespie flags |= DF_REF_MODE_CHANGE;
951*c87b03e5Sespie #endif
952*c87b03e5Sespie
953*c87b03e5Sespie /* May be, we should flag the use of strict_low_part somehow. Might be
954*c87b03e5Sespie handy for the reg allocator. */
955*c87b03e5Sespie while (GET_CODE (dst) == STRICT_LOW_PART
956*c87b03e5Sespie || GET_CODE (dst) == ZERO_EXTRACT
957*c87b03e5Sespie || GET_CODE (dst) == SIGN_EXTRACT
958*c87b03e5Sespie || read_modify_subreg_p (dst))
959*c87b03e5Sespie {
960*c87b03e5Sespie /* Strict low part always contains SUBREG, but we don't want to make
961*c87b03e5Sespie it appear outside, as whole register is always considered. */
962*c87b03e5Sespie if (GET_CODE (dst) == STRICT_LOW_PART)
963*c87b03e5Sespie {
964*c87b03e5Sespie loc = &XEXP (dst, 0);
965*c87b03e5Sespie dst = *loc;
966*c87b03e5Sespie }
967*c87b03e5Sespie #ifdef CLASS_CANNOT_CHANGE_MODE
968*c87b03e5Sespie if (GET_CODE (dst) == SUBREG
969*c87b03e5Sespie && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (dst)),
970*c87b03e5Sespie GET_MODE (dst)))
971*c87b03e5Sespie flags |= DF_REF_MODE_CHANGE;
972*c87b03e5Sespie #endif
973*c87b03e5Sespie loc = &XEXP (dst, 0);
974*c87b03e5Sespie dst = *loc;
975*c87b03e5Sespie flags |= DF_REF_READ_WRITE;
976*c87b03e5Sespie }
977*c87b03e5Sespie
978*c87b03e5Sespie if (GET_CODE (dst) == REG
979*c87b03e5Sespie || (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG))
980*c87b03e5Sespie df_ref_record (df, dst, loc, insn, DF_REF_REG_DEF, flags);
981*c87b03e5Sespie }
982*c87b03e5Sespie
983*c87b03e5Sespie
984*c87b03e5Sespie /* Process all the registers defined in the pattern rtx, X. */
985*c87b03e5Sespie static void
df_defs_record(df,x,bb,insn)986*c87b03e5Sespie df_defs_record (df, x, bb, insn)
987*c87b03e5Sespie struct df *df;
988*c87b03e5Sespie rtx x;
989*c87b03e5Sespie basic_block bb;
990*c87b03e5Sespie rtx insn;
991*c87b03e5Sespie {
992*c87b03e5Sespie RTX_CODE code = GET_CODE (x);
993*c87b03e5Sespie
994*c87b03e5Sespie if (code == SET || code == CLOBBER)
995*c87b03e5Sespie {
996*c87b03e5Sespie /* Mark the single def within the pattern. */
997*c87b03e5Sespie df_def_record_1 (df, x, bb, insn);
998*c87b03e5Sespie }
999*c87b03e5Sespie else if (code == PARALLEL)
1000*c87b03e5Sespie {
1001*c87b03e5Sespie int i;
1002*c87b03e5Sespie
1003*c87b03e5Sespie /* Mark the multiple defs within the pattern. */
1004*c87b03e5Sespie for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1005*c87b03e5Sespie {
1006*c87b03e5Sespie code = GET_CODE (XVECEXP (x, 0, i));
1007*c87b03e5Sespie if (code == SET || code == CLOBBER)
1008*c87b03e5Sespie df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
1009*c87b03e5Sespie }
1010*c87b03e5Sespie }
1011*c87b03e5Sespie }
1012*c87b03e5Sespie
1013*c87b03e5Sespie
1014*c87b03e5Sespie /* Process all the registers used in the rtx at address LOC. */
1015*c87b03e5Sespie static void
df_uses_record(df,loc,ref_type,bb,insn,flags)1016*c87b03e5Sespie df_uses_record (df, loc, ref_type, bb, insn, flags)
1017*c87b03e5Sespie struct df *df;
1018*c87b03e5Sespie rtx *loc;
1019*c87b03e5Sespie enum df_ref_type ref_type;
1020*c87b03e5Sespie basic_block bb;
1021*c87b03e5Sespie rtx insn;
1022*c87b03e5Sespie enum df_ref_flags flags;
1023*c87b03e5Sespie {
1024*c87b03e5Sespie RTX_CODE code;
1025*c87b03e5Sespie rtx x;
1026*c87b03e5Sespie retry:
1027*c87b03e5Sespie x = *loc;
1028*c87b03e5Sespie if (!x)
1029*c87b03e5Sespie return;
1030*c87b03e5Sespie code = GET_CODE (x);
1031*c87b03e5Sespie switch (code)
1032*c87b03e5Sespie {
1033*c87b03e5Sespie case LABEL_REF:
1034*c87b03e5Sespie case SYMBOL_REF:
1035*c87b03e5Sespie case CONST_INT:
1036*c87b03e5Sespie case CONST:
1037*c87b03e5Sespie case CONST_DOUBLE:
1038*c87b03e5Sespie case CONST_VECTOR:
1039*c87b03e5Sespie case PC:
1040*c87b03e5Sespie case CC0:
1041*c87b03e5Sespie case ADDR_VEC:
1042*c87b03e5Sespie case ADDR_DIFF_VEC:
1043*c87b03e5Sespie return;
1044*c87b03e5Sespie
1045*c87b03e5Sespie case CLOBBER:
1046*c87b03e5Sespie /* If we are clobbering a MEM, mark any registers inside the address
1047*c87b03e5Sespie as being used. */
1048*c87b03e5Sespie if (GET_CODE (XEXP (x, 0)) == MEM)
1049*c87b03e5Sespie df_uses_record (df, &XEXP (XEXP (x, 0), 0),
1050*c87b03e5Sespie DF_REF_REG_MEM_STORE, bb, insn, flags);
1051*c87b03e5Sespie
1052*c87b03e5Sespie /* If we're clobbering a REG then we have a def so ignore. */
1053*c87b03e5Sespie return;
1054*c87b03e5Sespie
1055*c87b03e5Sespie case MEM:
1056*c87b03e5Sespie df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn, flags);
1057*c87b03e5Sespie return;
1058*c87b03e5Sespie
1059*c87b03e5Sespie case SUBREG:
1060*c87b03e5Sespie /* While we're here, optimize this case. */
1061*c87b03e5Sespie
1062*c87b03e5Sespie /* In case the SUBREG is not of a register, don't optimize. */
1063*c87b03e5Sespie if (GET_CODE (SUBREG_REG (x)) != REG)
1064*c87b03e5Sespie {
1065*c87b03e5Sespie loc = &SUBREG_REG (x);
1066*c87b03e5Sespie df_uses_record (df, loc, ref_type, bb, insn, flags);
1067*c87b03e5Sespie return;
1068*c87b03e5Sespie }
1069*c87b03e5Sespie #ifdef CLASS_CANNOT_CHANGE_MODE
1070*c87b03e5Sespie if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
1071*c87b03e5Sespie GET_MODE (SUBREG_REG (x))))
1072*c87b03e5Sespie flags |= DF_REF_MODE_CHANGE;
1073*c87b03e5Sespie #endif
1074*c87b03e5Sespie
1075*c87b03e5Sespie /* ... Fall through ... */
1076*c87b03e5Sespie
1077*c87b03e5Sespie case REG:
1078*c87b03e5Sespie /* See a register (or subreg) other than being set. */
1079*c87b03e5Sespie df_ref_record (df, x, loc, insn, ref_type, flags);
1080*c87b03e5Sespie return;
1081*c87b03e5Sespie
1082*c87b03e5Sespie case SET:
1083*c87b03e5Sespie {
1084*c87b03e5Sespie rtx dst = SET_DEST (x);
1085*c87b03e5Sespie
1086*c87b03e5Sespie df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);
1087*c87b03e5Sespie
1088*c87b03e5Sespie switch (GET_CODE (dst))
1089*c87b03e5Sespie {
1090*c87b03e5Sespie enum df_ref_flags use_flags;
1091*c87b03e5Sespie case SUBREG:
1092*c87b03e5Sespie if (read_modify_subreg_p (dst))
1093*c87b03e5Sespie {
1094*c87b03e5Sespie use_flags = DF_REF_READ_WRITE;
1095*c87b03e5Sespie #ifdef CLASS_CANNOT_CHANGE_MODE
1096*c87b03e5Sespie if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
1097*c87b03e5Sespie GET_MODE (SUBREG_REG (dst))))
1098*c87b03e5Sespie use_flags |= DF_REF_MODE_CHANGE;
1099*c87b03e5Sespie #endif
1100*c87b03e5Sespie df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1101*c87b03e5Sespie insn, use_flags);
1102*c87b03e5Sespie break;
1103*c87b03e5Sespie }
1104*c87b03e5Sespie /* ... FALLTHRU ... */
1105*c87b03e5Sespie case REG:
1106*c87b03e5Sespie case PARALLEL:
1107*c87b03e5Sespie case PC:
1108*c87b03e5Sespie case CC0:
1109*c87b03e5Sespie break;
1110*c87b03e5Sespie case MEM:
1111*c87b03e5Sespie df_uses_record (df, &XEXP (dst, 0),
1112*c87b03e5Sespie DF_REF_REG_MEM_STORE,
1113*c87b03e5Sespie bb, insn, 0);
1114*c87b03e5Sespie break;
1115*c87b03e5Sespie case STRICT_LOW_PART:
1116*c87b03e5Sespie /* A strict_low_part uses the whole reg not only the subreg. */
1117*c87b03e5Sespie dst = XEXP (dst, 0);
1118*c87b03e5Sespie if (GET_CODE (dst) != SUBREG)
1119*c87b03e5Sespie abort ();
1120*c87b03e5Sespie use_flags = DF_REF_READ_WRITE;
1121*c87b03e5Sespie #ifdef CLASS_CANNOT_CHANGE_MODE
1122*c87b03e5Sespie if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
1123*c87b03e5Sespie GET_MODE (SUBREG_REG (dst))))
1124*c87b03e5Sespie use_flags |= DF_REF_MODE_CHANGE;
1125*c87b03e5Sespie #endif
1126*c87b03e5Sespie df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1127*c87b03e5Sespie insn, use_flags);
1128*c87b03e5Sespie break;
1129*c87b03e5Sespie case ZERO_EXTRACT:
1130*c87b03e5Sespie case SIGN_EXTRACT:
1131*c87b03e5Sespie df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn,
1132*c87b03e5Sespie DF_REF_READ_WRITE);
1133*c87b03e5Sespie df_uses_record (df, &XEXP (dst, 1), DF_REF_REG_USE, bb, insn, 0);
1134*c87b03e5Sespie df_uses_record (df, &XEXP (dst, 2), DF_REF_REG_USE, bb, insn, 0);
1135*c87b03e5Sespie dst = XEXP (dst, 0);
1136*c87b03e5Sespie break;
1137*c87b03e5Sespie default:
1138*c87b03e5Sespie abort ();
1139*c87b03e5Sespie }
1140*c87b03e5Sespie return;
1141*c87b03e5Sespie }
1142*c87b03e5Sespie
1143*c87b03e5Sespie case RETURN:
1144*c87b03e5Sespie break;
1145*c87b03e5Sespie
1146*c87b03e5Sespie case ASM_OPERANDS:
1147*c87b03e5Sespie case UNSPEC_VOLATILE:
1148*c87b03e5Sespie case TRAP_IF:
1149*c87b03e5Sespie case ASM_INPUT:
1150*c87b03e5Sespie {
1151*c87b03e5Sespie /* Traditional and volatile asm instructions must be considered to use
1152*c87b03e5Sespie and clobber all hard registers, all pseudo-registers and all of
1153*c87b03e5Sespie memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
1154*c87b03e5Sespie
1155*c87b03e5Sespie Consider for instance a volatile asm that changes the fpu rounding
1156*c87b03e5Sespie mode. An insn should not be moved across this even if it only uses
1157*c87b03e5Sespie pseudo-regs because it might give an incorrectly rounded result.
1158*c87b03e5Sespie
1159*c87b03e5Sespie For now, just mark any regs we can find in ASM_OPERANDS as
1160*c87b03e5Sespie used. */
1161*c87b03e5Sespie
1162*c87b03e5Sespie /* For all ASM_OPERANDS, we must traverse the vector of input operands.
1163*c87b03e5Sespie We can not just fall through here since then we would be confused
1164*c87b03e5Sespie by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
1165*c87b03e5Sespie traditional asms unlike their normal usage. */
1166*c87b03e5Sespie if (code == ASM_OPERANDS)
1167*c87b03e5Sespie {
1168*c87b03e5Sespie int j;
1169*c87b03e5Sespie
1170*c87b03e5Sespie for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1171*c87b03e5Sespie df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
1172*c87b03e5Sespie DF_REF_REG_USE, bb, insn, 0);
1173*c87b03e5Sespie return;
1174*c87b03e5Sespie }
1175*c87b03e5Sespie break;
1176*c87b03e5Sespie }
1177*c87b03e5Sespie
1178*c87b03e5Sespie case PRE_DEC:
1179*c87b03e5Sespie case POST_DEC:
1180*c87b03e5Sespie case PRE_INC:
1181*c87b03e5Sespie case POST_INC:
1182*c87b03e5Sespie case PRE_MODIFY:
1183*c87b03e5Sespie case POST_MODIFY:
1184*c87b03e5Sespie /* Catch the def of the register being modified. */
1185*c87b03e5Sespie df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), insn, DF_REF_REG_DEF, DF_REF_READ_WRITE);
1186*c87b03e5Sespie
1187*c87b03e5Sespie /* ... Fall through to handle uses ... */
1188*c87b03e5Sespie
1189*c87b03e5Sespie default:
1190*c87b03e5Sespie break;
1191*c87b03e5Sespie }
1192*c87b03e5Sespie
1193*c87b03e5Sespie /* Recursively scan the operands of this expression. */
1194*c87b03e5Sespie {
1195*c87b03e5Sespie const char *fmt = GET_RTX_FORMAT (code);
1196*c87b03e5Sespie int i;
1197*c87b03e5Sespie
1198*c87b03e5Sespie for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1199*c87b03e5Sespie {
1200*c87b03e5Sespie if (fmt[i] == 'e')
1201*c87b03e5Sespie {
1202*c87b03e5Sespie /* Tail recursive case: save a function call level. */
1203*c87b03e5Sespie if (i == 0)
1204*c87b03e5Sespie {
1205*c87b03e5Sespie loc = &XEXP (x, 0);
1206*c87b03e5Sespie goto retry;
1207*c87b03e5Sespie }
1208*c87b03e5Sespie df_uses_record (df, &XEXP (x, i), ref_type, bb, insn, flags);
1209*c87b03e5Sespie }
1210*c87b03e5Sespie else if (fmt[i] == 'E')
1211*c87b03e5Sespie {
1212*c87b03e5Sespie int j;
1213*c87b03e5Sespie for (j = 0; j < XVECLEN (x, i); j++)
1214*c87b03e5Sespie df_uses_record (df, &XVECEXP (x, i, j), ref_type,
1215*c87b03e5Sespie bb, insn, flags);
1216*c87b03e5Sespie }
1217*c87b03e5Sespie }
1218*c87b03e5Sespie }
1219*c87b03e5Sespie }
1220*c87b03e5Sespie
1221*c87b03e5Sespie
1222*c87b03e5Sespie /* Record all the df within INSN of basic block BB. */
1223*c87b03e5Sespie static void
df_insn_refs_record(df,bb,insn)1224*c87b03e5Sespie df_insn_refs_record (df, bb, insn)
1225*c87b03e5Sespie struct df *df;
1226*c87b03e5Sespie basic_block bb;
1227*c87b03e5Sespie rtx insn;
1228*c87b03e5Sespie {
1229*c87b03e5Sespie int i;
1230*c87b03e5Sespie
1231*c87b03e5Sespie if (INSN_P (insn))
1232*c87b03e5Sespie {
1233*c87b03e5Sespie rtx note;
1234*c87b03e5Sespie
1235*c87b03e5Sespie /* Record register defs */
1236*c87b03e5Sespie df_defs_record (df, PATTERN (insn), bb, insn);
1237*c87b03e5Sespie
1238*c87b03e5Sespie if (df->flags & DF_EQUIV_NOTES)
1239*c87b03e5Sespie for (note = REG_NOTES (insn); note;
1240*c87b03e5Sespie note = XEXP (note, 1))
1241*c87b03e5Sespie {
1242*c87b03e5Sespie switch (REG_NOTE_KIND (note))
1243*c87b03e5Sespie {
1244*c87b03e5Sespie case REG_EQUIV:
1245*c87b03e5Sespie case REG_EQUAL:
1246*c87b03e5Sespie df_uses_record (df, &XEXP (note, 0), DF_REF_REG_USE,
1247*c87b03e5Sespie bb, insn, 0);
1248*c87b03e5Sespie default:
1249*c87b03e5Sespie break;
1250*c87b03e5Sespie }
1251*c87b03e5Sespie }
1252*c87b03e5Sespie
1253*c87b03e5Sespie if (GET_CODE (insn) == CALL_INSN)
1254*c87b03e5Sespie {
1255*c87b03e5Sespie rtx note;
1256*c87b03e5Sespie rtx x;
1257*c87b03e5Sespie
1258*c87b03e5Sespie /* Record the registers used to pass arguments. */
1259*c87b03e5Sespie for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1260*c87b03e5Sespie note = XEXP (note, 1))
1261*c87b03e5Sespie {
1262*c87b03e5Sespie if (GET_CODE (XEXP (note, 0)) == USE)
1263*c87b03e5Sespie df_uses_record (df, &XEXP (XEXP (note, 0), 0), DF_REF_REG_USE,
1264*c87b03e5Sespie bb, insn, 0);
1265*c87b03e5Sespie }
1266*c87b03e5Sespie
1267*c87b03e5Sespie       /* The stack pointer is implicitly used by a CALL insn.  */
1268*c87b03e5Sespie x = df_reg_use_gen (STACK_POINTER_REGNUM);
1269*c87b03e5Sespie df_uses_record (df, &XEXP (x, 0), DF_REF_REG_USE, bb, insn, 0);
1270*c87b03e5Sespie
1271*c87b03e5Sespie if (df->flags & DF_HARD_REGS)
1272*c87b03e5Sespie {
1273*c87b03e5Sespie /* Calls may also reference any of the global registers,
1274*c87b03e5Sespie so they are recorded as used. */
1275*c87b03e5Sespie for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1276*c87b03e5Sespie if (global_regs[i])
1277*c87b03e5Sespie {
1278*c87b03e5Sespie x = df_reg_use_gen (i);
1279*c87b03e5Sespie df_uses_record (df, &SET_DEST (x),
1280*c87b03e5Sespie DF_REF_REG_USE, bb, insn, 0);
1281*c87b03e5Sespie }
1282*c87b03e5Sespie }
1283*c87b03e5Sespie }
1284*c87b03e5Sespie
1285*c87b03e5Sespie /* Record the register uses. */
1286*c87b03e5Sespie df_uses_record (df, &PATTERN (insn),
1287*c87b03e5Sespie DF_REF_REG_USE, bb, insn, 0);
1288*c87b03e5Sespie
1289*c87b03e5Sespie
1290*c87b03e5Sespie if (GET_CODE (insn) == CALL_INSN)
1291*c87b03e5Sespie {
1292*c87b03e5Sespie rtx note;
1293*c87b03e5Sespie
1294*c87b03e5Sespie if (df->flags & DF_HARD_REGS)
1295*c87b03e5Sespie {
1296*c87b03e5Sespie /* Kill all registers invalidated by a call. */
1297*c87b03e5Sespie for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1298*c87b03e5Sespie if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1299*c87b03e5Sespie {
1300*c87b03e5Sespie rtx reg_clob = df_reg_clobber_gen (i);
1301*c87b03e5Sespie df_defs_record (df, reg_clob, bb, insn);
1302*c87b03e5Sespie }
1303*c87b03e5Sespie }
1304*c87b03e5Sespie
1305*c87b03e5Sespie /* There may be extra registers to be clobbered. */
1306*c87b03e5Sespie for (note = CALL_INSN_FUNCTION_USAGE (insn);
1307*c87b03e5Sespie note;
1308*c87b03e5Sespie note = XEXP (note, 1))
1309*c87b03e5Sespie if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1310*c87b03e5Sespie df_defs_record (df, XEXP (note, 0), bb, insn);
1311*c87b03e5Sespie }
1312*c87b03e5Sespie }
1313*c87b03e5Sespie }
1314*c87b03e5Sespie
1315*c87b03e5Sespie
1316*c87b03e5Sespie /* Record all the refs within the basic block BB. */
1317*c87b03e5Sespie static void
1318*c87b03e5Sespie df_bb_refs_record (df, bb)
1319*c87b03e5Sespie struct df *df;
1320*c87b03e5Sespie basic_block bb;
1321*c87b03e5Sespie {
1322*c87b03e5Sespie rtx insn;
1323*c87b03e5Sespie
1324*c87b03e5Sespie /* Scan the block an insn at a time from beginning to end. */
1325*c87b03e5Sespie for (insn = bb->head; ; insn = NEXT_INSN (insn))
1326*c87b03e5Sespie {
1327*c87b03e5Sespie if (INSN_P (insn))
1328*c87b03e5Sespie {
1329*c87b03e5Sespie /* Record defs within INSN. */
1330*c87b03e5Sespie df_insn_refs_record (df, bb, insn);
1331*c87b03e5Sespie }
1332*c87b03e5Sespie if (insn == bb->end)
1333*c87b03e5Sespie break;
1334*c87b03e5Sespie }
1335*c87b03e5Sespie }
1336*c87b03e5Sespie
1337*c87b03e5Sespie
1338*c87b03e5Sespie /* Record all the refs in the basic blocks specified by BLOCKS. */
1339*c87b03e5Sespie static void
1340*c87b03e5Sespie df_refs_record (df, blocks)
1341*c87b03e5Sespie struct df *df;
1342*c87b03e5Sespie bitmap blocks;
1343*c87b03e5Sespie {
1344*c87b03e5Sespie basic_block bb;
1345*c87b03e5Sespie
1346*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1347*c87b03e5Sespie {
1348*c87b03e5Sespie df_bb_refs_record (df, bb);
1349*c87b03e5Sespie });
1350*c87b03e5Sespie }
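
/* An illustrative sketch, not used anywhere in this file: once the refs
   have been recorded and linked to their insns (see df_refs_process
   below), a client can walk the per-insn def and use lists directly.
   DUMP_FILE is a hypothetical FILE * supplied by the caller.  */
#if 0
static void
example_dump_insn_refs (df, insn, dump_file)
     struct df *df;
     rtx insn;
     FILE *dump_file;
{
  unsigned int uid = INSN_UID (insn);
  struct df_link *link;

  /* Registers defined by INSN.  */
  for (link = df->insns[uid].defs; link; link = link->next)
    fprintf (dump_file, "insn %d defs reg %d\n",
	     (int) uid, (int) DF_REF_REGNO (link->ref));

  /* Registers used by INSN.  */
  for (link = df->insns[uid].uses; link; link = link->next)
    fprintf (dump_file, "insn %d uses reg %d\n",
	     (int) uid, (int) DF_REF_REGNO (link->ref));
}
#endif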
1351*c87b03e5Sespie
1352*c87b03e5Sespie /* Dataflow analysis routines. */
1353*c87b03e5Sespie
1354*c87b03e5Sespie
1355*c87b03e5Sespie /* Create reg-def chains for basic block BB. These are a list of
1356*c87b03e5Sespie definitions for each register. */
1357*c87b03e5Sespie static void
1358*c87b03e5Sespie df_bb_reg_def_chain_create (df, bb)
1359*c87b03e5Sespie struct df *df;
1360*c87b03e5Sespie basic_block bb;
1361*c87b03e5Sespie {
1362*c87b03e5Sespie rtx insn;
1363*c87b03e5Sespie
1364*c87b03e5Sespie   /* Perhaps the defs should be sorted using a depth first search
1365*c87b03e5Sespie      of the CFG (or possibly a breadth first search).  We currently
1366*c87b03e5Sespie      scan the insns within each basic block in reverse order so that
1367*c87b03e5Sespie      the first defs appear at the start of the chain.  */
1368*c87b03e5Sespie
1369*c87b03e5Sespie for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1370*c87b03e5Sespie insn = PREV_INSN (insn))
1371*c87b03e5Sespie {
1372*c87b03e5Sespie struct df_link *link;
1373*c87b03e5Sespie unsigned int uid = INSN_UID (insn);
1374*c87b03e5Sespie
1375*c87b03e5Sespie if (! INSN_P (insn))
1376*c87b03e5Sespie continue;
1377*c87b03e5Sespie
1378*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
1379*c87b03e5Sespie {
1380*c87b03e5Sespie struct ref *def = link->ref;
1381*c87b03e5Sespie unsigned int dregno = DF_REF_REGNO (def);
1382*c87b03e5Sespie 	  /* Don't add refs to the chain twice, i.e. only add new
1383*c87b03e5Sespie 	     refs.  XXX The same could be done by testing whether the
1384*c87b03e5Sespie 	     current insn is a modified (or a new) one.  This would be faster.  */
1385*c87b03e5Sespie if (DF_REF_ID (def) < df->def_id_save)
1386*c87b03e5Sespie continue;
1387*c87b03e5Sespie
1388*c87b03e5Sespie df->regs[dregno].defs
1389*c87b03e5Sespie = df_link_create (def, df->regs[dregno].defs);
1390*c87b03e5Sespie }
1391*c87b03e5Sespie }
1392*c87b03e5Sespie }
1393*c87b03e5Sespie
1394*c87b03e5Sespie
1395*c87b03e5Sespie /* Create reg-def chains for each basic block within BLOCKS. These
1396*c87b03e5Sespie are a list of definitions for each register. */
1397*c87b03e5Sespie static void
1398*c87b03e5Sespie df_reg_def_chain_create (df, blocks)
1399*c87b03e5Sespie struct df *df;
1400*c87b03e5Sespie bitmap blocks;
1401*c87b03e5Sespie {
1402*c87b03e5Sespie basic_block bb;
1403*c87b03e5Sespie
1404*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP/*_REV*/ (blocks, 0, bb,
1405*c87b03e5Sespie {
1406*c87b03e5Sespie df_bb_reg_def_chain_create (df, bb);
1407*c87b03e5Sespie });
1408*c87b03e5Sespie }
1409*c87b03e5Sespie
1410*c87b03e5Sespie
1411*c87b03e5Sespie /* Create reg-use chains for basic block BB. These are a list of uses
1412*c87b03e5Sespie for each register. */
1413*c87b03e5Sespie static void
1414*c87b03e5Sespie df_bb_reg_use_chain_create (df, bb)
1415*c87b03e5Sespie struct df *df;
1416*c87b03e5Sespie basic_block bb;
1417*c87b03e5Sespie {
1418*c87b03e5Sespie rtx insn;
1419*c87b03e5Sespie
1420*c87b03e5Sespie /* Scan in forward order so that the last uses appear at the
1421*c87b03e5Sespie start of the chain. */
1422*c87b03e5Sespie
1423*c87b03e5Sespie for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1424*c87b03e5Sespie insn = NEXT_INSN (insn))
1425*c87b03e5Sespie {
1426*c87b03e5Sespie struct df_link *link;
1427*c87b03e5Sespie unsigned int uid = INSN_UID (insn);
1428*c87b03e5Sespie
1429*c87b03e5Sespie if (! INSN_P (insn))
1430*c87b03e5Sespie continue;
1431*c87b03e5Sespie
1432*c87b03e5Sespie for (link = df->insns[uid].uses; link; link = link->next)
1433*c87b03e5Sespie {
1434*c87b03e5Sespie struct ref *use = link->ref;
1435*c87b03e5Sespie unsigned int uregno = DF_REF_REGNO (use);
1436*c87b03e5Sespie 	  /* Don't add refs to the chain twice, i.e. only add new
1437*c87b03e5Sespie 	     refs.  XXX The same could be done by testing whether the
1438*c87b03e5Sespie 	     current insn is a modified (or a new) one.  This would be faster.  */
1439*c87b03e5Sespie if (DF_REF_ID (use) < df->use_id_save)
1440*c87b03e5Sespie continue;
1441*c87b03e5Sespie
1442*c87b03e5Sespie df->regs[uregno].uses
1443*c87b03e5Sespie = df_link_create (use, df->regs[uregno].uses);
1444*c87b03e5Sespie }
1445*c87b03e5Sespie }
1446*c87b03e5Sespie }
1447*c87b03e5Sespie
1448*c87b03e5Sespie
1449*c87b03e5Sespie /* Create reg-use chains for each basic block within BLOCKS. These
1450*c87b03e5Sespie are a list of uses for each register. */
1451*c87b03e5Sespie static void
1452*c87b03e5Sespie df_reg_use_chain_create (df, blocks)
1453*c87b03e5Sespie struct df *df;
1454*c87b03e5Sespie bitmap blocks;
1455*c87b03e5Sespie {
1456*c87b03e5Sespie basic_block bb;
1457*c87b03e5Sespie
1458*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1459*c87b03e5Sespie {
1460*c87b03e5Sespie df_bb_reg_use_chain_create (df, bb);
1461*c87b03e5Sespie });
1462*c87b03e5Sespie }
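
/* An illustrative sketch, not used anywhere in this file: walking the
   reg-def and reg-use chains built above to visit every recorded def
   and use of register REGNO.  Assumes df_analyse has been run with
   DF_RD_CHAIN and DF_RU_CHAIN requested.  */
#if 0
static void
example_walk_reg_chains (df, regno)
     struct df *df;
     unsigned int regno;
{
  struct df_link *link;

  /* For a register local to one basic block the earliest def sits at
     the head of this list (see df_analyse_1 below).  */
  for (link = df->regs[regno].defs; link; link = link->next)
    {
      struct ref *def = link->ref;
      rtx insn = DF_REF_INSN (def);

      /* ... examine DEF or INSN here ...  */
    }

  /* Likewise, the last use sits at the head of the reg-use chain.  */
  for (link = df->regs[regno].uses; link; link = link->next)
    {
      struct ref *use = link->ref;
      rtx insn = DF_REF_INSN (use);

      /* ... examine USE or INSN here ...  */
    }
}
#endif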
1463*c87b03e5Sespie
1464*c87b03e5Sespie
1465*c87b03e5Sespie /* Create def-use chains from reaching use bitmaps for basic block BB. */
1466*c87b03e5Sespie static void
1467*c87b03e5Sespie df_bb_du_chain_create (df, bb, ru)
1468*c87b03e5Sespie struct df *df;
1469*c87b03e5Sespie basic_block bb;
1470*c87b03e5Sespie bitmap ru;
1471*c87b03e5Sespie {
1472*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
1473*c87b03e5Sespie rtx insn;
1474*c87b03e5Sespie
1475*c87b03e5Sespie bitmap_copy (ru, bb_info->ru_out);
1476*c87b03e5Sespie
1477*c87b03e5Sespie /* For each def in BB create a linked list (chain) of uses
1478*c87b03e5Sespie reached from the def. */
1479*c87b03e5Sespie for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1480*c87b03e5Sespie insn = PREV_INSN (insn))
1481*c87b03e5Sespie {
1482*c87b03e5Sespie struct df_link *def_link;
1483*c87b03e5Sespie struct df_link *use_link;
1484*c87b03e5Sespie unsigned int uid = INSN_UID (insn);
1485*c87b03e5Sespie
1486*c87b03e5Sespie if (! INSN_P (insn))
1487*c87b03e5Sespie continue;
1488*c87b03e5Sespie
1489*c87b03e5Sespie /* For each def in insn... */
1490*c87b03e5Sespie for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1491*c87b03e5Sespie {
1492*c87b03e5Sespie struct ref *def = def_link->ref;
1493*c87b03e5Sespie unsigned int dregno = DF_REF_REGNO (def);
1494*c87b03e5Sespie
1495*c87b03e5Sespie DF_REF_CHAIN (def) = 0;
1496*c87b03e5Sespie
1497*c87b03e5Sespie /* While the reg-use chains are not essential, it
1498*c87b03e5Sespie is _much_ faster to search these short lists rather
1499*c87b03e5Sespie than all the reaching uses, especially for large functions. */
1500*c87b03e5Sespie for (use_link = df->regs[dregno].uses; use_link;
1501*c87b03e5Sespie use_link = use_link->next)
1502*c87b03e5Sespie {
1503*c87b03e5Sespie struct ref *use = use_link->ref;
1504*c87b03e5Sespie
1505*c87b03e5Sespie if (bitmap_bit_p (ru, DF_REF_ID (use)))
1506*c87b03e5Sespie {
1507*c87b03e5Sespie DF_REF_CHAIN (def)
1508*c87b03e5Sespie = df_link_create (use, DF_REF_CHAIN (def));
1509*c87b03e5Sespie
1510*c87b03e5Sespie bitmap_clear_bit (ru, DF_REF_ID (use));
1511*c87b03e5Sespie }
1512*c87b03e5Sespie }
1513*c87b03e5Sespie }
1514*c87b03e5Sespie
1515*c87b03e5Sespie /* For each use in insn... */
1516*c87b03e5Sespie for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1517*c87b03e5Sespie {
1518*c87b03e5Sespie struct ref *use = use_link->ref;
1519*c87b03e5Sespie bitmap_set_bit (ru, DF_REF_ID (use));
1520*c87b03e5Sespie }
1521*c87b03e5Sespie }
1522*c87b03e5Sespie }
1523*c87b03e5Sespie
1524*c87b03e5Sespie
1525*c87b03e5Sespie /* Create def-use chains from reaching use bitmaps for basic blocks
1526*c87b03e5Sespie in BLOCKS. */
1527*c87b03e5Sespie static void
1528*c87b03e5Sespie df_du_chain_create (df, blocks)
1529*c87b03e5Sespie struct df *df;
1530*c87b03e5Sespie bitmap blocks;
1531*c87b03e5Sespie {
1532*c87b03e5Sespie bitmap ru;
1533*c87b03e5Sespie basic_block bb;
1534*c87b03e5Sespie
1535*c87b03e5Sespie ru = BITMAP_XMALLOC ();
1536*c87b03e5Sespie
1537*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1538*c87b03e5Sespie {
1539*c87b03e5Sespie df_bb_du_chain_create (df, bb, ru);
1540*c87b03e5Sespie });
1541*c87b03e5Sespie
1542*c87b03e5Sespie BITMAP_XFREE (ru);
1543*c87b03e5Sespie }
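
/* An illustrative sketch, not used anywhere in this file: consuming the
   def-use chains created above.  For a def, DF_REF_CHAIN links all the
   uses reached by that def.  Assumes df_analyse was run with
   DF_DU_CHAIN requested.  */
#if 0
static int
example_count_reached_uses (def)
     struct ref *def;
{
  struct df_link *link;
  int count = 0;

  for (link = DF_REF_CHAIN (def); link; link = link->next)
    {
      /* link->ref is a use reached by DEF; its insn is
	 DF_REF_INSN (link->ref).  */
      count++;
    }
  return count;
}
#endif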
1544*c87b03e5Sespie
1545*c87b03e5Sespie
1546*c87b03e5Sespie /* Create use-def chains from reaching def bitmaps for basic block BB. */
1547*c87b03e5Sespie static void
1548*c87b03e5Sespie df_bb_ud_chain_create (df, bb)
1549*c87b03e5Sespie struct df *df;
1550*c87b03e5Sespie basic_block bb;
1551*c87b03e5Sespie {
1552*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
1553*c87b03e5Sespie struct ref **reg_def_last = df->reg_def_last;
1554*c87b03e5Sespie rtx insn;
1555*c87b03e5Sespie
1556*c87b03e5Sespie memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));
1557*c87b03e5Sespie
1558*c87b03e5Sespie /* For each use in BB create a linked list (chain) of defs
1559*c87b03e5Sespie that reach the use. */
1560*c87b03e5Sespie for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1561*c87b03e5Sespie insn = NEXT_INSN (insn))
1562*c87b03e5Sespie {
1563*c87b03e5Sespie unsigned int uid = INSN_UID (insn);
1564*c87b03e5Sespie struct df_link *use_link;
1565*c87b03e5Sespie struct df_link *def_link;
1566*c87b03e5Sespie
1567*c87b03e5Sespie if (! INSN_P (insn))
1568*c87b03e5Sespie continue;
1569*c87b03e5Sespie
1570*c87b03e5Sespie /* For each use in insn... */
1571*c87b03e5Sespie for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1572*c87b03e5Sespie {
1573*c87b03e5Sespie struct ref *use = use_link->ref;
1574*c87b03e5Sespie unsigned int regno = DF_REF_REGNO (use);
1575*c87b03e5Sespie
1576*c87b03e5Sespie DF_REF_CHAIN (use) = 0;
1577*c87b03e5Sespie
1578*c87b03e5Sespie /* Has regno been defined in this BB yet? If so, use
1579*c87b03e5Sespie the last def as the single entry for the use-def
1580*c87b03e5Sespie chain for this use. Otherwise, we need to add all
1581*c87b03e5Sespie the defs using this regno that reach the start of
1582*c87b03e5Sespie this BB. */
1583*c87b03e5Sespie if (reg_def_last[regno])
1584*c87b03e5Sespie {
1585*c87b03e5Sespie DF_REF_CHAIN (use)
1586*c87b03e5Sespie = df_link_create (reg_def_last[regno], 0);
1587*c87b03e5Sespie }
1588*c87b03e5Sespie else
1589*c87b03e5Sespie {
1590*c87b03e5Sespie /* While the reg-def chains are not essential, it is
1591*c87b03e5Sespie _much_ faster to search these short lists rather than
1592*c87b03e5Sespie all the reaching defs, especially for large
1593*c87b03e5Sespie functions. */
1594*c87b03e5Sespie for (def_link = df->regs[regno].defs; def_link;
1595*c87b03e5Sespie def_link = def_link->next)
1596*c87b03e5Sespie {
1597*c87b03e5Sespie struct ref *def = def_link->ref;
1598*c87b03e5Sespie
1599*c87b03e5Sespie if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
1600*c87b03e5Sespie {
1601*c87b03e5Sespie DF_REF_CHAIN (use)
1602*c87b03e5Sespie = df_link_create (def, DF_REF_CHAIN (use));
1603*c87b03e5Sespie }
1604*c87b03e5Sespie }
1605*c87b03e5Sespie }
1606*c87b03e5Sespie }
1607*c87b03e5Sespie
1608*c87b03e5Sespie
1609*c87b03e5Sespie /* For each def in insn...record the last def of each reg. */
1610*c87b03e5Sespie for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1611*c87b03e5Sespie {
1612*c87b03e5Sespie struct ref *def = def_link->ref;
1613*c87b03e5Sespie int dregno = DF_REF_REGNO (def);
1614*c87b03e5Sespie
1615*c87b03e5Sespie reg_def_last[dregno] = def;
1616*c87b03e5Sespie }
1617*c87b03e5Sespie }
1618*c87b03e5Sespie }
1619*c87b03e5Sespie
1620*c87b03e5Sespie
1621*c87b03e5Sespie /* Create use-def chains from reaching def bitmaps for basic blocks
1622*c87b03e5Sespie within BLOCKS. */
1623*c87b03e5Sespie static void
1624*c87b03e5Sespie df_ud_chain_create (df, blocks)
1625*c87b03e5Sespie struct df *df;
1626*c87b03e5Sespie bitmap blocks;
1627*c87b03e5Sespie {
1628*c87b03e5Sespie basic_block bb;
1629*c87b03e5Sespie
1630*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1631*c87b03e5Sespie {
1632*c87b03e5Sespie df_bb_ud_chain_create (df, bb);
1633*c87b03e5Sespie });
1634*c87b03e5Sespie }
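
/* An illustrative sketch, not used anywhere in this file: consuming the
   use-def chains created above.  Return the single def reaching USE, or
   NULL if there is no reaching def or more than one.  Assumes
   df_analyse was run with DF_UD_CHAIN requested.  */
#if 0
static struct ref *
example_single_reaching_def (use)
     struct ref *use;
{
  struct df_link *link = DF_REF_CHAIN (use);

  if (link && ! link->next)
    return link->ref;
  return NULL;
}
#endif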
1635*c87b03e5Sespie
1636*c87b03e5Sespie
1637*c87b03e5Sespie
1638*c87b03e5Sespie static void
1639*c87b03e5Sespie df_rd_transfer_function (bb, changed, in, out, gen, kill, data)
1640*c87b03e5Sespie int bb ATTRIBUTE_UNUSED;
1641*c87b03e5Sespie int *changed;
1642*c87b03e5Sespie bitmap in, out, gen, kill;
1643*c87b03e5Sespie void *data ATTRIBUTE_UNUSED;
1644*c87b03e5Sespie {
1645*c87b03e5Sespie *changed = bitmap_union_of_diff (out, gen, in, kill);
1646*c87b03e5Sespie }
1647*c87b03e5Sespie static void
1648*c87b03e5Sespie df_ru_transfer_function (bb, changed, in, out, gen, kill, data)
1649*c87b03e5Sespie int bb ATTRIBUTE_UNUSED;
1650*c87b03e5Sespie int *changed;
1651*c87b03e5Sespie bitmap in, out, gen, kill;
1652*c87b03e5Sespie void *data ATTRIBUTE_UNUSED;
1653*c87b03e5Sespie {
1654*c87b03e5Sespie *changed = bitmap_union_of_diff (in, gen, out, kill);
1655*c87b03e5Sespie }
1656*c87b03e5Sespie
1657*c87b03e5Sespie static void
1658*c87b03e5Sespie df_lr_transfer_function (bb, changed, in, out, use, def, data)
1659*c87b03e5Sespie int bb ATTRIBUTE_UNUSED;
1660*c87b03e5Sespie int *changed;
1661*c87b03e5Sespie bitmap in, out, use, def;
1662*c87b03e5Sespie void *data ATTRIBUTE_UNUSED;
1663*c87b03e5Sespie {
1664*c87b03e5Sespie *changed = bitmap_union_of_diff (in, use, out, def);
1665*c87b03e5Sespie }
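
/* An illustrative sketch, not used anywhere in this file: the transfer
   functions above each compute DST = GEN U (SRC - KILL) in a single
   call to bitmap_union_of_diff.  The same reaching-def step written
   out with a scratch bitmap looks like this (assuming the usual
   bitmap_operation primitive from bitmap.h, whose return value says
   whether the destination bitmap changed).  */
#if 0
static void
example_rd_transfer_function_slow (bb, changed, in, out, gen, kill, data)
     int bb ATTRIBUTE_UNUSED;
     int *changed;
     bitmap in, out, gen, kill;
     void *data ATTRIBUTE_UNUSED;
{
  bitmap tmp = BITMAP_XMALLOC ();

  /* tmp = in & ~kill.  */
  bitmap_operation (tmp, in, kill, BITMAP_AND_COMPL);

  /* out = gen | tmp, noting whether OUT changed.  */
  *changed = bitmap_operation (out, gen, tmp, BITMAP_IOR);

  BITMAP_XFREE (tmp);
}
#endif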
1666*c87b03e5Sespie
1667*c87b03e5Sespie
1668*c87b03e5Sespie /* Compute local reaching def info for basic block BB. */
1669*c87b03e5Sespie static void
1670*c87b03e5Sespie df_bb_rd_local_compute (df, bb)
1671*c87b03e5Sespie struct df *df;
1672*c87b03e5Sespie basic_block bb;
1673*c87b03e5Sespie {
1674*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
1675*c87b03e5Sespie rtx insn;
1676*c87b03e5Sespie
1677*c87b03e5Sespie for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1678*c87b03e5Sespie insn = NEXT_INSN (insn))
1679*c87b03e5Sespie {
1680*c87b03e5Sespie unsigned int uid = INSN_UID (insn);
1681*c87b03e5Sespie struct df_link *def_link;
1682*c87b03e5Sespie
1683*c87b03e5Sespie if (! INSN_P (insn))
1684*c87b03e5Sespie continue;
1685*c87b03e5Sespie
1686*c87b03e5Sespie for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1687*c87b03e5Sespie {
1688*c87b03e5Sespie struct ref *def = def_link->ref;
1689*c87b03e5Sespie unsigned int regno = DF_REF_REGNO (def);
1690*c87b03e5Sespie struct df_link *def2_link;
1691*c87b03e5Sespie
1692*c87b03e5Sespie for (def2_link = df->regs[regno].defs; def2_link;
1693*c87b03e5Sespie def2_link = def2_link->next)
1694*c87b03e5Sespie {
1695*c87b03e5Sespie struct ref *def2 = def2_link->ref;
1696*c87b03e5Sespie
1697*c87b03e5Sespie /* Add all defs of this reg to the set of kills. This
1698*c87b03e5Sespie is greedy since many of these defs will not actually
1699*c87b03e5Sespie be killed by this BB but it keeps things a lot
1700*c87b03e5Sespie simpler. */
1701*c87b03e5Sespie bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));
1702*c87b03e5Sespie
1703*c87b03e5Sespie /* Zap from the set of gens for this BB. */
1704*c87b03e5Sespie bitmap_clear_bit (bb_info->rd_gen, DF_REF_ID (def2));
1705*c87b03e5Sespie }
1706*c87b03e5Sespie
1707*c87b03e5Sespie bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
1708*c87b03e5Sespie }
1709*c87b03e5Sespie }
1710*c87b03e5Sespie
1711*c87b03e5Sespie bb_info->rd_valid = 1;
1712*c87b03e5Sespie }
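
/* As a worked example of the greedy kill sets above, consider a block
   containing

	r1 = ...	(def d1)
	r2 = ...	(def d2)
	r1 = ...	(def d3)

   rd_gen ends up as {d2, d3}: d1 is generated but then zapped when d3
   is processed.  rd_kill contains every def of r1 and of r2 anywhere
   in the function, including defs this block cannot actually kill.  */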
1713*c87b03e5Sespie
1714*c87b03e5Sespie
1715*c87b03e5Sespie /* Compute local reaching def info for each basic block within BLOCKS. */
1716*c87b03e5Sespie static void
1717*c87b03e5Sespie df_rd_local_compute (df, blocks)
1718*c87b03e5Sespie struct df *df;
1719*c87b03e5Sespie bitmap blocks;
1720*c87b03e5Sespie {
1721*c87b03e5Sespie basic_block bb;
1722*c87b03e5Sespie
1723*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1724*c87b03e5Sespie {
1725*c87b03e5Sespie df_bb_rd_local_compute (df, bb);
1726*c87b03e5Sespie });
1727*c87b03e5Sespie }
1728*c87b03e5Sespie
1729*c87b03e5Sespie
1730*c87b03e5Sespie /* Compute local reaching use (upward exposed use) info for basic
1731*c87b03e5Sespie block BB. */
1732*c87b03e5Sespie static void
1733*c87b03e5Sespie df_bb_ru_local_compute (df, bb)
1734*c87b03e5Sespie struct df *df;
1735*c87b03e5Sespie basic_block bb;
1736*c87b03e5Sespie {
1737*c87b03e5Sespie   /* This is much trickier than computing reaching defs.  With
1738*c87b03e5Sespie      reaching defs, defs get killed by other defs of the same register.
1739*c87b03e5Sespie      With upwards exposed uses, uses get killed by defs of the same regno.  */
1740*c87b03e5Sespie
1741*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
1742*c87b03e5Sespie rtx insn;
1743*c87b03e5Sespie
1744*c87b03e5Sespie
1745*c87b03e5Sespie for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1746*c87b03e5Sespie insn = PREV_INSN (insn))
1747*c87b03e5Sespie {
1748*c87b03e5Sespie unsigned int uid = INSN_UID (insn);
1749*c87b03e5Sespie struct df_link *def_link;
1750*c87b03e5Sespie struct df_link *use_link;
1751*c87b03e5Sespie
1752*c87b03e5Sespie if (! INSN_P (insn))
1753*c87b03e5Sespie continue;
1754*c87b03e5Sespie
1755*c87b03e5Sespie for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1756*c87b03e5Sespie {
1757*c87b03e5Sespie struct ref *def = def_link->ref;
1758*c87b03e5Sespie unsigned int dregno = DF_REF_REGNO (def);
1759*c87b03e5Sespie
1760*c87b03e5Sespie for (use_link = df->regs[dregno].uses; use_link;
1761*c87b03e5Sespie use_link = use_link->next)
1762*c87b03e5Sespie {
1763*c87b03e5Sespie struct ref *use = use_link->ref;
1764*c87b03e5Sespie
1765*c87b03e5Sespie /* Add all uses of this reg to the set of kills. This
1766*c87b03e5Sespie is greedy since many of these uses will not actually
1767*c87b03e5Sespie be killed by this BB but it keeps things a lot
1768*c87b03e5Sespie simpler. */
1769*c87b03e5Sespie bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));
1770*c87b03e5Sespie
1771*c87b03e5Sespie /* Zap from the set of gens for this BB. */
1772*c87b03e5Sespie bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
1773*c87b03e5Sespie }
1774*c87b03e5Sespie }
1775*c87b03e5Sespie
1776*c87b03e5Sespie for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1777*c87b03e5Sespie {
1778*c87b03e5Sespie struct ref *use = use_link->ref;
1779*c87b03e5Sespie /* Add use to set of gens in this BB. */
1780*c87b03e5Sespie bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
1781*c87b03e5Sespie }
1782*c87b03e5Sespie }
1783*c87b03e5Sespie bb_info->ru_valid = 1;
1784*c87b03e5Sespie }
1785*c87b03e5Sespie
1786*c87b03e5Sespie
1787*c87b03e5Sespie /* Compute local reaching use (upward exposed use) info for each basic
1788*c87b03e5Sespie block within BLOCKS. */
1789*c87b03e5Sespie static void
1790*c87b03e5Sespie df_ru_local_compute (df, blocks)
1791*c87b03e5Sespie struct df *df;
1792*c87b03e5Sespie bitmap blocks;
1793*c87b03e5Sespie {
1794*c87b03e5Sespie basic_block bb;
1795*c87b03e5Sespie
1796*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1797*c87b03e5Sespie {
1798*c87b03e5Sespie df_bb_ru_local_compute (df, bb);
1799*c87b03e5Sespie });
1800*c87b03e5Sespie }
1801*c87b03e5Sespie
1802*c87b03e5Sespie
1803*c87b03e5Sespie /* Compute local live variable info for basic block BB. */
1804*c87b03e5Sespie static void
1805*c87b03e5Sespie df_bb_lr_local_compute (df, bb)
1806*c87b03e5Sespie struct df *df;
1807*c87b03e5Sespie basic_block bb;
1808*c87b03e5Sespie {
1809*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
1810*c87b03e5Sespie rtx insn;
1811*c87b03e5Sespie
1812*c87b03e5Sespie for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1813*c87b03e5Sespie insn = PREV_INSN (insn))
1814*c87b03e5Sespie {
1815*c87b03e5Sespie unsigned int uid = INSN_UID (insn);
1816*c87b03e5Sespie struct df_link *link;
1817*c87b03e5Sespie
1818*c87b03e5Sespie if (! INSN_P (insn))
1819*c87b03e5Sespie continue;
1820*c87b03e5Sespie
1821*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
1822*c87b03e5Sespie {
1823*c87b03e5Sespie struct ref *def = link->ref;
1824*c87b03e5Sespie unsigned int dregno = DF_REF_REGNO (def);
1825*c87b03e5Sespie
1826*c87b03e5Sespie /* Add def to set of defs in this BB. */
1827*c87b03e5Sespie bitmap_set_bit (bb_info->lr_def, dregno);
1828*c87b03e5Sespie
1829*c87b03e5Sespie bitmap_clear_bit (bb_info->lr_use, dregno);
1830*c87b03e5Sespie }
1831*c87b03e5Sespie
1832*c87b03e5Sespie for (link = df->insns[uid].uses; link; link = link->next)
1833*c87b03e5Sespie {
1834*c87b03e5Sespie struct ref *use = link->ref;
1835*c87b03e5Sespie /* Add use to set of uses in this BB. */
1836*c87b03e5Sespie bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
1837*c87b03e5Sespie }
1838*c87b03e5Sespie }
1839*c87b03e5Sespie bb_info->lr_valid = 1;
1840*c87b03e5Sespie }
1841*c87b03e5Sespie
1842*c87b03e5Sespie
1843*c87b03e5Sespie /* Compute local live variable info for each basic block within BLOCKS. */
1844*c87b03e5Sespie static void
1845*c87b03e5Sespie df_lr_local_compute (df, blocks)
1846*c87b03e5Sespie struct df *df;
1847*c87b03e5Sespie bitmap blocks;
1848*c87b03e5Sespie {
1849*c87b03e5Sespie basic_block bb;
1850*c87b03e5Sespie
1851*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1852*c87b03e5Sespie {
1853*c87b03e5Sespie df_bb_lr_local_compute (df, bb);
1854*c87b03e5Sespie });
1855*c87b03e5Sespie }
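
/* An illustrative sketch, not used anywhere in this file: once the
   global solution has been computed (df_analyse with DF_LR requested),
   lr_in gives the registers live on entry to a block.  */
#if 0
static int
example_reg_live_at_start_p (df, bb, regno)
     struct df *df;
     basic_block bb;
     unsigned int regno;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);

  return bitmap_bit_p (bb_info->lr_in, regno);
}
#endif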
1856*c87b03e5Sespie
1857*c87b03e5Sespie
1858*c87b03e5Sespie /* Compute register info: lifetime, bb, and number of defs and uses
1859*c87b03e5Sespie for basic block BB. */
1860*c87b03e5Sespie static void
1861*c87b03e5Sespie df_bb_reg_info_compute (df, bb, live)
1862*c87b03e5Sespie struct df *df;
1863*c87b03e5Sespie basic_block bb;
1864*c87b03e5Sespie bitmap live;
1865*c87b03e5Sespie {
1866*c87b03e5Sespie struct reg_info *reg_info = df->regs;
1867*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
1868*c87b03e5Sespie rtx insn;
1869*c87b03e5Sespie
1870*c87b03e5Sespie bitmap_copy (live, bb_info->lr_out);
1871*c87b03e5Sespie
1872*c87b03e5Sespie for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1873*c87b03e5Sespie insn = PREV_INSN (insn))
1874*c87b03e5Sespie {
1875*c87b03e5Sespie unsigned int uid = INSN_UID (insn);
1876*c87b03e5Sespie unsigned int regno;
1877*c87b03e5Sespie struct df_link *link;
1878*c87b03e5Sespie
1879*c87b03e5Sespie if (! INSN_P (insn))
1880*c87b03e5Sespie continue;
1881*c87b03e5Sespie
1882*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
1883*c87b03e5Sespie {
1884*c87b03e5Sespie struct ref *def = link->ref;
1885*c87b03e5Sespie unsigned int dregno = DF_REF_REGNO (def);
1886*c87b03e5Sespie
1887*c87b03e5Sespie /* Kill this register. */
1888*c87b03e5Sespie bitmap_clear_bit (live, dregno);
1889*c87b03e5Sespie reg_info[dregno].n_defs++;
1890*c87b03e5Sespie }
1891*c87b03e5Sespie
1892*c87b03e5Sespie for (link = df->insns[uid].uses; link; link = link->next)
1893*c87b03e5Sespie {
1894*c87b03e5Sespie struct ref *use = link->ref;
1895*c87b03e5Sespie unsigned int uregno = DF_REF_REGNO (use);
1896*c87b03e5Sespie
1897*c87b03e5Sespie /* This register is now live. */
1898*c87b03e5Sespie bitmap_set_bit (live, uregno);
1899*c87b03e5Sespie reg_info[uregno].n_uses++;
1900*c87b03e5Sespie }
1901*c87b03e5Sespie
1902*c87b03e5Sespie /* Increment lifetimes of all live registers. */
1903*c87b03e5Sespie EXECUTE_IF_SET_IN_BITMAP (live, 0, regno,
1904*c87b03e5Sespie {
1905*c87b03e5Sespie reg_info[regno].lifetime++;
1906*c87b03e5Sespie });
1907*c87b03e5Sespie }
1908*c87b03e5Sespie }
1909*c87b03e5Sespie
1910*c87b03e5Sespie
1911*c87b03e5Sespie /* Compute register info: lifetime, bb, and number of defs and uses. */
1912*c87b03e5Sespie static void
1913*c87b03e5Sespie df_reg_info_compute (df, blocks)
1914*c87b03e5Sespie struct df *df;
1915*c87b03e5Sespie bitmap blocks;
1916*c87b03e5Sespie {
1917*c87b03e5Sespie basic_block bb;
1918*c87b03e5Sespie bitmap live;
1919*c87b03e5Sespie
1920*c87b03e5Sespie live = BITMAP_XMALLOC ();
1921*c87b03e5Sespie
1922*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1923*c87b03e5Sespie {
1924*c87b03e5Sespie df_bb_reg_info_compute (df, bb, live);
1925*c87b03e5Sespie });
1926*c87b03e5Sespie
1927*c87b03e5Sespie BITMAP_XFREE (live);
1928*c87b03e5Sespie }
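
/* An illustrative sketch, not used anywhere in this file: reading the
   per-register statistics gathered above, assuming df_analyse was run
   with DF_REG_INFO requested.  DUMP_FILE is a hypothetical FILE *
   supplied by the caller.  */
#if 0
static void
example_dump_reg_stats (df, dump_file)
     struct df *df;
     FILE *dump_file;
{
  unsigned int regno;

  for (regno = 0; regno < df->n_regs; regno++)
    fprintf (dump_file, "reg %d: %d defs, %d uses, lifetime %d\n",
	     (int) regno, df->regs[regno].n_defs, df->regs[regno].n_uses,
	     df->regs[regno].lifetime);
}
#endif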
1929*c87b03e5Sespie
1930*c87b03e5Sespie
1931*c87b03e5Sespie /* Assign LUIDs for BB. */
1932*c87b03e5Sespie static int
1933*c87b03e5Sespie df_bb_luids_set (df, bb)
1934*c87b03e5Sespie struct df *df;
1935*c87b03e5Sespie basic_block bb;
1936*c87b03e5Sespie {
1937*c87b03e5Sespie rtx insn;
1938*c87b03e5Sespie int luid = 0;
1939*c87b03e5Sespie
1940*c87b03e5Sespie /* The LUIDs are monotonically increasing for each basic block. */
1941*c87b03e5Sespie
1942*c87b03e5Sespie for (insn = bb->head; ; insn = NEXT_INSN (insn))
1943*c87b03e5Sespie {
1944*c87b03e5Sespie       if (INSN_P (insn))
1945*c87b03e5Sespie 	luid++;
1946*c87b03e5Sespie       DF_INSN_LUID (df, insn) = luid;
1947*c87b03e5Sespie
1948*c87b03e5Sespie if (insn == bb->end)
1949*c87b03e5Sespie break;
1950*c87b03e5Sespie }
1951*c87b03e5Sespie return luid;
1952*c87b03e5Sespie }
1953*c87b03e5Sespie
1954*c87b03e5Sespie
1955*c87b03e5Sespie /* Assign LUIDs for each basic block within BLOCKS. */
1956*c87b03e5Sespie static int
1957*c87b03e5Sespie df_luids_set (df, blocks)
1958*c87b03e5Sespie struct df *df;
1959*c87b03e5Sespie bitmap blocks;
1960*c87b03e5Sespie {
1961*c87b03e5Sespie basic_block bb;
1962*c87b03e5Sespie int total = 0;
1963*c87b03e5Sespie
1964*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1965*c87b03e5Sespie {
1966*c87b03e5Sespie total += df_bb_luids_set (df, bb);
1967*c87b03e5Sespie });
1968*c87b03e5Sespie return total;
1969*c87b03e5Sespie }
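
/* An illustrative sketch, not used anywhere in this file: since LUIDs
   increase monotonically within a block, they give a cheap ordering
   test for two insns known to lie in the same basic block.  */
#if 0
static int
example_insn_precedes_p (df, insn1, insn2)
     struct df *df;
     rtx insn1, insn2;
{
  return DF_INSN_LUID (df, insn1) < DF_INSN_LUID (df, insn2);
}
#endif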
1970*c87b03e5Sespie
1971*c87b03e5Sespie /* Perform dataflow analysis using existing DF structure for blocks
1972*c87b03e5Sespie within BLOCKS. If BLOCKS is zero, use all basic blocks in the CFG. */
1973*c87b03e5Sespie static void
1974*c87b03e5Sespie df_analyse_1 (df, blocks, flags, update)
1975*c87b03e5Sespie struct df *df;
1976*c87b03e5Sespie bitmap blocks;
1977*c87b03e5Sespie int flags;
1978*c87b03e5Sespie int update;
1979*c87b03e5Sespie {
1980*c87b03e5Sespie int aflags;
1981*c87b03e5Sespie int dflags;
1982*c87b03e5Sespie int i;
1983*c87b03e5Sespie basic_block bb;
1984*c87b03e5Sespie
1985*c87b03e5Sespie dflags = 0;
1986*c87b03e5Sespie aflags = flags;
1987*c87b03e5Sespie if (flags & DF_UD_CHAIN)
1988*c87b03e5Sespie aflags |= DF_RD | DF_RD_CHAIN;
1989*c87b03e5Sespie
1990*c87b03e5Sespie if (flags & DF_DU_CHAIN)
1991*c87b03e5Sespie aflags |= DF_RU;
1992*c87b03e5Sespie
1993*c87b03e5Sespie if (flags & DF_RU)
1994*c87b03e5Sespie aflags |= DF_RU_CHAIN;
1995*c87b03e5Sespie
1996*c87b03e5Sespie if (flags & DF_REG_INFO)
1997*c87b03e5Sespie aflags |= DF_LR;
1998*c87b03e5Sespie
1999*c87b03e5Sespie if (! blocks)
2000*c87b03e5Sespie blocks = df->all_blocks;
2001*c87b03e5Sespie
2002*c87b03e5Sespie df->flags = flags;
2003*c87b03e5Sespie if (update)
2004*c87b03e5Sespie {
2005*c87b03e5Sespie df_refs_update (df);
2006*c87b03e5Sespie /* More fine grained incremental dataflow analysis would be
2007*c87b03e5Sespie nice. For now recompute the whole shebang for the
2008*c87b03e5Sespie modified blocks. */
2009*c87b03e5Sespie #if 0
2010*c87b03e5Sespie df_refs_unlink (df, blocks);
2011*c87b03e5Sespie #endif
2012*c87b03e5Sespie /* All the def-use, use-def chains can be potentially
2013*c87b03e5Sespie modified by changes in one block. The size of the
2014*c87b03e5Sespie bitmaps can also change. */
2015*c87b03e5Sespie }
2016*c87b03e5Sespie else
2017*c87b03e5Sespie {
2018*c87b03e5Sespie /* Scan the function for all register defs and uses. */
2019*c87b03e5Sespie df_refs_queue (df);
2020*c87b03e5Sespie df_refs_record (df, blocks);
2021*c87b03e5Sespie
2022*c87b03e5Sespie /* Link all the new defs and uses to the insns. */
2023*c87b03e5Sespie df_refs_process (df);
2024*c87b03e5Sespie }
2025*c87b03e5Sespie
2026*c87b03e5Sespie /* Allocate the bitmaps now the total number of defs and uses are
2027*c87b03e5Sespie known. If the number of defs or uses have changed, then
2028*c87b03e5Sespie these bitmaps need to be reallocated. */
2029*c87b03e5Sespie df_bitmaps_alloc (df, aflags);
2030*c87b03e5Sespie
2031*c87b03e5Sespie /* Set the LUIDs for each specified basic block. */
2032*c87b03e5Sespie df_luids_set (df, blocks);
2033*c87b03e5Sespie
2034*c87b03e5Sespie /* Recreate reg-def and reg-use chains from scratch so that first
2035*c87b03e5Sespie def is at the head of the reg-def chain and the last use is at
2036*c87b03e5Sespie the head of the reg-use chain. This is only important for
2037*c87b03e5Sespie regs local to a basic block as it speeds up searching. */
2038*c87b03e5Sespie if (aflags & DF_RD_CHAIN)
2039*c87b03e5Sespie {
2040*c87b03e5Sespie df_reg_def_chain_create (df, blocks);
2041*c87b03e5Sespie }
2042*c87b03e5Sespie
2043*c87b03e5Sespie if (aflags & DF_RU_CHAIN)
2044*c87b03e5Sespie {
2045*c87b03e5Sespie df_reg_use_chain_create (df, blocks);
2046*c87b03e5Sespie }
2047*c87b03e5Sespie
2048*c87b03e5Sespie df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
2049*c87b03e5Sespie df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
2050*c87b03e5Sespie df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2051*c87b03e5Sespie df->inverse_dfs_map = xmalloc (sizeof (int) * last_basic_block);
2052*c87b03e5Sespie df->inverse_rc_map = xmalloc (sizeof (int) * last_basic_block);
2053*c87b03e5Sespie df->inverse_rts_map = xmalloc (sizeof (int) * last_basic_block);
2054*c87b03e5Sespie
2055*c87b03e5Sespie flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2056*c87b03e5Sespie flow_reverse_top_sort_order_compute (df->rts_order);
2057*c87b03e5Sespie for (i = 0; i < n_basic_blocks; i++)
2058*c87b03e5Sespie {
2059*c87b03e5Sespie df->inverse_dfs_map[df->dfs_order[i]] = i;
2060*c87b03e5Sespie df->inverse_rc_map[df->rc_order[i]] = i;
2061*c87b03e5Sespie df->inverse_rts_map[df->rts_order[i]] = i;
2062*c87b03e5Sespie }
2063*c87b03e5Sespie if (aflags & DF_RD)
2064*c87b03e5Sespie {
2065*c87b03e5Sespie /* Compute the sets of gens and kills for the defs of each bb. */
2066*c87b03e5Sespie df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
2067*c87b03e5Sespie {
2068*c87b03e5Sespie bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2069*c87b03e5Sespie bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2070*c87b03e5Sespie bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
2071*c87b03e5Sespie bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
2072*c87b03e5Sespie FOR_EACH_BB (bb)
2073*c87b03e5Sespie {
2074*c87b03e5Sespie in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
2075*c87b03e5Sespie out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
2076*c87b03e5Sespie gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
2077*c87b03e5Sespie kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
2078*c87b03e5Sespie }
2079*c87b03e5Sespie iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
2080*c87b03e5Sespie FORWARD, UNION, df_rd_transfer_function,
2081*c87b03e5Sespie df->inverse_rc_map, NULL);
2082*c87b03e5Sespie free (in);
2083*c87b03e5Sespie free (out);
2084*c87b03e5Sespie free (gen);
2085*c87b03e5Sespie free (kill);
2086*c87b03e5Sespie }
2087*c87b03e5Sespie }
2088*c87b03e5Sespie
2089*c87b03e5Sespie if (aflags & DF_UD_CHAIN)
2090*c87b03e5Sespie {
2091*c87b03e5Sespie /* Create use-def chains. */
2092*c87b03e5Sespie df_ud_chain_create (df, df->all_blocks);
2093*c87b03e5Sespie
2094*c87b03e5Sespie if (! (flags & DF_RD))
2095*c87b03e5Sespie dflags |= DF_RD;
2096*c87b03e5Sespie }
2097*c87b03e5Sespie
2098*c87b03e5Sespie if (aflags & DF_RU)
2099*c87b03e5Sespie {
2100*c87b03e5Sespie /* Compute the sets of gens and kills for the upwards exposed
2101*c87b03e5Sespie uses in each bb. */
2102*c87b03e5Sespie df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
2103*c87b03e5Sespie {
2104*c87b03e5Sespie bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2105*c87b03e5Sespie bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2106*c87b03e5Sespie bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
2107*c87b03e5Sespie bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
2108*c87b03e5Sespie FOR_EACH_BB (bb)
2109*c87b03e5Sespie {
2110*c87b03e5Sespie in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
2111*c87b03e5Sespie out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
2112*c87b03e5Sespie gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
2113*c87b03e5Sespie kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
2114*c87b03e5Sespie }
2115*c87b03e5Sespie iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
2116*c87b03e5Sespie BACKWARD, UNION, df_ru_transfer_function,
2117*c87b03e5Sespie df->inverse_rts_map, NULL);
2118*c87b03e5Sespie free (in);
2119*c87b03e5Sespie free (out);
2120*c87b03e5Sespie free (gen);
2121*c87b03e5Sespie free (kill);
2122*c87b03e5Sespie }
2123*c87b03e5Sespie }
2124*c87b03e5Sespie
2125*c87b03e5Sespie if (aflags & DF_DU_CHAIN)
2126*c87b03e5Sespie {
2127*c87b03e5Sespie /* Create def-use chains. */
2128*c87b03e5Sespie df_du_chain_create (df, df->all_blocks);
2129*c87b03e5Sespie
2130*c87b03e5Sespie if (! (flags & DF_RU))
2131*c87b03e5Sespie dflags |= DF_RU;
2132*c87b03e5Sespie }
2133*c87b03e5Sespie
2134*c87b03e5Sespie /* Free up bitmaps that are no longer required. */
2135*c87b03e5Sespie if (dflags)
2136*c87b03e5Sespie df_bitmaps_free (df, dflags);
2137*c87b03e5Sespie
2138*c87b03e5Sespie if (aflags & DF_LR)
2139*c87b03e5Sespie {
2140*c87b03e5Sespie /* Compute the sets of defs and uses of live variables. */
2141*c87b03e5Sespie df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
2142*c87b03e5Sespie {
2143*c87b03e5Sespie bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2144*c87b03e5Sespie bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2145*c87b03e5Sespie bitmap *use = xmalloc (sizeof (bitmap) * last_basic_block);
2146*c87b03e5Sespie bitmap *def = xmalloc (sizeof (bitmap) * last_basic_block);
2147*c87b03e5Sespie FOR_EACH_BB (bb)
2148*c87b03e5Sespie {
2149*c87b03e5Sespie in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
2150*c87b03e5Sespie out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
2151*c87b03e5Sespie use[bb->index] = DF_BB_INFO (df, bb)->lr_use;
2152*c87b03e5Sespie def[bb->index] = DF_BB_INFO (df, bb)->lr_def;
2153*c87b03e5Sespie }
2154*c87b03e5Sespie iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
2155*c87b03e5Sespie BACKWARD, UNION, df_lr_transfer_function,
2156*c87b03e5Sespie df->inverse_rts_map, NULL);
2157*c87b03e5Sespie free (in);
2158*c87b03e5Sespie free (out);
2159*c87b03e5Sespie free (use);
2160*c87b03e5Sespie free (def);
2161*c87b03e5Sespie }
2162*c87b03e5Sespie }
2163*c87b03e5Sespie
2164*c87b03e5Sespie if (aflags & DF_REG_INFO)
2165*c87b03e5Sespie {
2166*c87b03e5Sespie df_reg_info_compute (df, df->all_blocks);
2167*c87b03e5Sespie }
2168*c87b03e5Sespie free (df->dfs_order);
2169*c87b03e5Sespie free (df->rc_order);
2170*c87b03e5Sespie free (df->rts_order);
2171*c87b03e5Sespie free (df->inverse_rc_map);
2172*c87b03e5Sespie free (df->inverse_dfs_map);
2173*c87b03e5Sespie free (df->inverse_rts_map);
2174*c87b03e5Sespie }
2175*c87b03e5Sespie
2176*c87b03e5Sespie
2177*c87b03e5Sespie /* Initialize dataflow analysis. */
2178*c87b03e5Sespie struct df *
2179*c87b03e5Sespie df_init ()
2180*c87b03e5Sespie {
2181*c87b03e5Sespie struct df *df;
2182*c87b03e5Sespie
2183*c87b03e5Sespie df = xcalloc (1, sizeof (struct df));
2184*c87b03e5Sespie
2185*c87b03e5Sespie /* Squirrel away a global for debugging. */
2186*c87b03e5Sespie ddf = df;
2187*c87b03e5Sespie
2188*c87b03e5Sespie return df;
2189*c87b03e5Sespie }
2190*c87b03e5Sespie
2191*c87b03e5Sespie
2192*c87b03e5Sespie /* Start queuing refs. */
2193*c87b03e5Sespie static int
2194*c87b03e5Sespie df_refs_queue (df)
2195*c87b03e5Sespie struct df *df;
2196*c87b03e5Sespie {
2197*c87b03e5Sespie df->def_id_save = df->def_id;
2198*c87b03e5Sespie df->use_id_save = df->use_id;
2199*c87b03e5Sespie /* ???? Perhaps we should save current obstack state so that we can
2200*c87b03e5Sespie unwind it. */
2201*c87b03e5Sespie return 0;
2202*c87b03e5Sespie }
2203*c87b03e5Sespie
2204*c87b03e5Sespie
2205*c87b03e5Sespie /* Process queued refs. */
2206*c87b03e5Sespie static int
2207*c87b03e5Sespie df_refs_process (df)
2208*c87b03e5Sespie struct df *df;
2209*c87b03e5Sespie {
2210*c87b03e5Sespie unsigned int i;
2211*c87b03e5Sespie
2212*c87b03e5Sespie /* Build new insn-def chains. */
2213*c87b03e5Sespie for (i = df->def_id_save; i != df->def_id; i++)
2214*c87b03e5Sespie {
2215*c87b03e5Sespie struct ref *def = df->defs[i];
2216*c87b03e5Sespie unsigned int uid = DF_REF_INSN_UID (def);
2217*c87b03e5Sespie
2218*c87b03e5Sespie /* Add def to head of def list for INSN. */
2219*c87b03e5Sespie df->insns[uid].defs
2220*c87b03e5Sespie = df_link_create (def, df->insns[uid].defs);
2221*c87b03e5Sespie }
2222*c87b03e5Sespie
2223*c87b03e5Sespie /* Build new insn-use chains. */
2224*c87b03e5Sespie for (i = df->use_id_save; i != df->use_id; i++)
2225*c87b03e5Sespie {
2226*c87b03e5Sespie struct ref *use = df->uses[i];
2227*c87b03e5Sespie unsigned int uid = DF_REF_INSN_UID (use);
2228*c87b03e5Sespie
2229*c87b03e5Sespie /* Add use to head of use list for INSN. */
2230*c87b03e5Sespie df->insns[uid].uses
2231*c87b03e5Sespie = df_link_create (use, df->insns[uid].uses);
2232*c87b03e5Sespie }
2233*c87b03e5Sespie return 0;
2234*c87b03e5Sespie }
2235*c87b03e5Sespie
2236*c87b03e5Sespie
2237*c87b03e5Sespie /* Update refs for basic block BB. */
2238*c87b03e5Sespie static int
2239*c87b03e5Sespie df_bb_refs_update (df, bb)
2240*c87b03e5Sespie struct df *df;
2241*c87b03e5Sespie basic_block bb;
2242*c87b03e5Sespie {
2243*c87b03e5Sespie rtx insn;
2244*c87b03e5Sespie int count = 0;
2245*c87b03e5Sespie
2246*c87b03e5Sespie /* While we have to scan the chain of insns for this BB, we don't
2247*c87b03e5Sespie need to allocate and queue a long chain of BB/INSN pairs. Using
2248*c87b03e5Sespie a bitmap for insns_modified saves memory and avoids queuing
2249*c87b03e5Sespie duplicates. */
2250*c87b03e5Sespie
2251*c87b03e5Sespie for (insn = bb->head; ; insn = NEXT_INSN (insn))
2252*c87b03e5Sespie {
2253*c87b03e5Sespie unsigned int uid;
2254*c87b03e5Sespie
2255*c87b03e5Sespie uid = INSN_UID (insn);
2256*c87b03e5Sespie
2257*c87b03e5Sespie if (bitmap_bit_p (df->insns_modified, uid))
2258*c87b03e5Sespie {
2259*c87b03e5Sespie /* Delete any allocated refs of this insn. MPH, FIXME. */
2260*c87b03e5Sespie df_insn_refs_unlink (df, bb, insn);
2261*c87b03e5Sespie
2262*c87b03e5Sespie /* Scan the insn for refs. */
2263*c87b03e5Sespie df_insn_refs_record (df, bb, insn);
2264*c87b03e5Sespie
2265*c87b03e5Sespie count++;
2266*c87b03e5Sespie }
2267*c87b03e5Sespie if (insn == bb->end)
2268*c87b03e5Sespie break;
2269*c87b03e5Sespie }
2270*c87b03e5Sespie return count;
2271*c87b03e5Sespie }
2272*c87b03e5Sespie
2273*c87b03e5Sespie
2274*c87b03e5Sespie /* Process all the modified/deleted insns that were queued. */
2275*c87b03e5Sespie static int
2276*c87b03e5Sespie df_refs_update (df)
2277*c87b03e5Sespie struct df *df;
2278*c87b03e5Sespie {
2279*c87b03e5Sespie basic_block bb;
2280*c87b03e5Sespie int count = 0;
2281*c87b03e5Sespie
2282*c87b03e5Sespie if ((unsigned int) max_reg_num () >= df->reg_size)
2283*c87b03e5Sespie df_reg_table_realloc (df, 0);
2284*c87b03e5Sespie
2285*c87b03e5Sespie df_refs_queue (df);
2286*c87b03e5Sespie
2287*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
2288*c87b03e5Sespie {
2289*c87b03e5Sespie count += df_bb_refs_update (df, bb);
2290*c87b03e5Sespie });
2291*c87b03e5Sespie
2292*c87b03e5Sespie df_refs_process (df);
2293*c87b03e5Sespie return count;
2294*c87b03e5Sespie }
2295*c87b03e5Sespie
2296*c87b03e5Sespie
2297*c87b03e5Sespie /* Return nonzero if any of the requested blocks in the bitmap
2298*c87b03e5Sespie BLOCKS have been modified. */
2299*c87b03e5Sespie static int
2300*c87b03e5Sespie df_modified_p (df, blocks)
2301*c87b03e5Sespie struct df *df;
2302*c87b03e5Sespie bitmap blocks;
2303*c87b03e5Sespie {
2304*c87b03e5Sespie int update = 0;
2305*c87b03e5Sespie basic_block bb;
2306*c87b03e5Sespie
2307*c87b03e5Sespie if (!df->n_bbs)
2308*c87b03e5Sespie return 0;
2309*c87b03e5Sespie
2310*c87b03e5Sespie FOR_EACH_BB (bb)
2311*c87b03e5Sespie if (bitmap_bit_p (df->bbs_modified, bb->index)
2312*c87b03e5Sespie && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->index)))
2313*c87b03e5Sespie {
2314*c87b03e5Sespie update = 1;
2315*c87b03e5Sespie break;
2316*c87b03e5Sespie }
2317*c87b03e5Sespie
2318*c87b03e5Sespie return update;
2319*c87b03e5Sespie }
2320*c87b03e5Sespie
2321*c87b03e5Sespie
2322*c87b03e5Sespie /* Analyse dataflow info for the basic blocks specified by the bitmap
2323*c87b03e5Sespie BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2324*c87b03e5Sespie modified blocks if BLOCKS is -1. */
2325*c87b03e5Sespie int
2326*c87b03e5Sespie df_analyse (df, blocks, flags)
2327*c87b03e5Sespie struct df *df;
2328*c87b03e5Sespie bitmap blocks;
2329*c87b03e5Sespie int flags;
2330*c87b03e5Sespie {
2331*c87b03e5Sespie int update;
2332*c87b03e5Sespie
2333*c87b03e5Sespie /* We could deal with additional basic blocks being created by
2334*c87b03e5Sespie rescanning everything again. */
2335*c87b03e5Sespie if (df->n_bbs && df->n_bbs != (unsigned int) last_basic_block)
2336*c87b03e5Sespie abort ();
2337*c87b03e5Sespie
2338*c87b03e5Sespie update = df_modified_p (df, blocks);
2339*c87b03e5Sespie if (update || (flags != df->flags))
2340*c87b03e5Sespie {
2341*c87b03e5Sespie if (! blocks)
2342*c87b03e5Sespie {
2343*c87b03e5Sespie if (df->n_bbs)
2344*c87b03e5Sespie {
2345*c87b03e5Sespie /* Recompute everything from scratch. */
2346*c87b03e5Sespie df_free (df);
2347*c87b03e5Sespie }
2348*c87b03e5Sespie /* Allocate and initialize data structures. */
2349*c87b03e5Sespie df_alloc (df, max_reg_num ());
2350*c87b03e5Sespie df_analyse_1 (df, 0, flags, 0);
2351*c87b03e5Sespie update = 1;
2352*c87b03e5Sespie }
2353*c87b03e5Sespie else
2354*c87b03e5Sespie {
2355*c87b03e5Sespie if (blocks == (bitmap) -1)
2356*c87b03e5Sespie blocks = df->bbs_modified;
2357*c87b03e5Sespie
2358*c87b03e5Sespie if (! df->n_bbs)
2359*c87b03e5Sespie abort ();
2360*c87b03e5Sespie
2361*c87b03e5Sespie df_analyse_1 (df, blocks, flags, 1);
2362*c87b03e5Sespie bitmap_zero (df->bbs_modified);
2363*c87b03e5Sespie bitmap_zero (df->insns_modified);
2364*c87b03e5Sespie }
2365*c87b03e5Sespie }
2366*c87b03e5Sespie return update;
2367*c87b03e5Sespie }
2368*c87b03e5Sespie
2369*c87b03e5Sespie
2370*c87b03e5Sespie /* Free all the dataflow info and the DF structure. */
2371*c87b03e5Sespie void
2372*c87b03e5Sespie df_finish (df)
2373*c87b03e5Sespie struct df *df;
2374*c87b03e5Sespie {
2375*c87b03e5Sespie df_free (df);
2376*c87b03e5Sespie free (df);
2377*c87b03e5Sespie }
2378*c87b03e5Sespie
2379*c87b03e5Sespie
2380*c87b03e5Sespie /* Unlink INSN from its reference information. */
2381*c87b03e5Sespie static void
2382*c87b03e5Sespie df_insn_refs_unlink (df, bb, insn)
2383*c87b03e5Sespie struct df *df;
2384*c87b03e5Sespie basic_block bb ATTRIBUTE_UNUSED;
2385*c87b03e5Sespie rtx insn;
2386*c87b03e5Sespie {
2387*c87b03e5Sespie struct df_link *link;
2388*c87b03e5Sespie unsigned int uid;
2389*c87b03e5Sespie
2390*c87b03e5Sespie uid = INSN_UID (insn);
2391*c87b03e5Sespie
2392*c87b03e5Sespie /* Unlink all refs defined by this insn. */
2393*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
2394*c87b03e5Sespie df_def_unlink (df, link->ref);
2395*c87b03e5Sespie
2396*c87b03e5Sespie /* Unlink all refs used by this insn. */
2397*c87b03e5Sespie for (link = df->insns[uid].uses; link; link = link->next)
2398*c87b03e5Sespie df_use_unlink (df, link->ref);
2399*c87b03e5Sespie
2400*c87b03e5Sespie df->insns[uid].defs = 0;
2401*c87b03e5Sespie df->insns[uid].uses = 0;
2402*c87b03e5Sespie }
2403*c87b03e5Sespie
2404*c87b03e5Sespie
2405*c87b03e5Sespie #if 0
2406*c87b03e5Sespie /* Unlink all the insns within BB from their reference information. */
2407*c87b03e5Sespie static void
2408*c87b03e5Sespie df_bb_refs_unlink (df, bb)
2409*c87b03e5Sespie struct df *df;
2410*c87b03e5Sespie basic_block bb;
2411*c87b03e5Sespie {
2412*c87b03e5Sespie rtx insn;
2413*c87b03e5Sespie
2414*c87b03e5Sespie /* Scan the block an insn at a time from beginning to end. */
2415*c87b03e5Sespie for (insn = bb->head; ; insn = NEXT_INSN (insn))
2416*c87b03e5Sespie {
2417*c87b03e5Sespie if (INSN_P (insn))
2418*c87b03e5Sespie {
2419*c87b03e5Sespie /* Unlink refs for INSN. */
2420*c87b03e5Sespie df_insn_refs_unlink (df, bb, insn);
2421*c87b03e5Sespie }
2422*c87b03e5Sespie if (insn == bb->end)
2423*c87b03e5Sespie break;
2424*c87b03e5Sespie }
2425*c87b03e5Sespie }
2426*c87b03e5Sespie
2427*c87b03e5Sespie
2428*c87b03e5Sespie /* Unlink all the refs in the basic blocks specified by BLOCKS.
2429*c87b03e5Sespie Not currently used. */
2430*c87b03e5Sespie static void
2431*c87b03e5Sespie df_refs_unlink (df, blocks)
2432*c87b03e5Sespie struct df *df;
2433*c87b03e5Sespie bitmap blocks;
2434*c87b03e5Sespie {
2435*c87b03e5Sespie basic_block bb;
2436*c87b03e5Sespie
2437*c87b03e5Sespie if (blocks)
2438*c87b03e5Sespie {
2439*c87b03e5Sespie FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2440*c87b03e5Sespie {
2441*c87b03e5Sespie df_bb_refs_unlink (df, bb);
2442*c87b03e5Sespie });
2443*c87b03e5Sespie }
2444*c87b03e5Sespie else
2445*c87b03e5Sespie {
2446*c87b03e5Sespie FOR_EACH_BB (bb)
2447*c87b03e5Sespie df_bb_refs_unlink (df, bb);
2448*c87b03e5Sespie }
2449*c87b03e5Sespie }
2450*c87b03e5Sespie #endif
2451*c87b03e5Sespie
2452*c87b03e5Sespie /* Functions to modify insns. */
2453*c87b03e5Sespie
2454*c87b03e5Sespie
2455*c87b03e5Sespie /* Delete INSN and all its reference information. */
2456*c87b03e5Sespie rtx
2457*c87b03e5Sespie df_insn_delete (df, bb, insn)
2458*c87b03e5Sespie struct df *df;
2459*c87b03e5Sespie      basic_block bb;
2460*c87b03e5Sespie rtx insn;
2461*c87b03e5Sespie {
2462*c87b03e5Sespie /* If the insn is a jump, we should perhaps call delete_insn to
2463*c87b03e5Sespie handle the JUMP_LABEL? */
2464*c87b03e5Sespie
2465*c87b03e5Sespie /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label. */
2466*c87b03e5Sespie if (insn == bb->head)
2467*c87b03e5Sespie abort ();
2468*c87b03e5Sespie
2469*c87b03e5Sespie /* Delete the insn. */
2470*c87b03e5Sespie delete_insn (insn);
2471*c87b03e5Sespie
2472*c87b03e5Sespie df_insn_modify (df, bb, insn);
2473*c87b03e5Sespie
2474*c87b03e5Sespie return NEXT_INSN (insn);
2475*c87b03e5Sespie }
2476*c87b03e5Sespie
2477*c87b03e5Sespie
2478*c87b03e5Sespie /* Mark that INSN within BB may have changed (created/modified/deleted).
2479*c87b03e5Sespie This may be called multiple times for the same insn. There is no
2480*c87b03e5Sespie harm calling this function if the insn wasn't changed; it will just
2481*c87b03e5Sespie slow down the rescanning of refs. */
2482*c87b03e5Sespie void
2483*c87b03e5Sespie df_insn_modify (df, bb, insn)
2484*c87b03e5Sespie struct df *df;
2485*c87b03e5Sespie basic_block bb;
2486*c87b03e5Sespie rtx insn;
2487*c87b03e5Sespie {
2488*c87b03e5Sespie unsigned int uid;
2489*c87b03e5Sespie
2490*c87b03e5Sespie uid = INSN_UID (insn);
2491*c87b03e5Sespie if (uid >= df->insn_size)
2492*c87b03e5Sespie df_insn_table_realloc (df, uid);
2493*c87b03e5Sespie
2494*c87b03e5Sespie bitmap_set_bit (df->bbs_modified, bb->index);
2495*c87b03e5Sespie bitmap_set_bit (df->insns_modified, uid);
2496*c87b03e5Sespie
2497*c87b03e5Sespie /* For incremental updating on the fly, perhaps we could make a copy
2498*c87b03e5Sespie of all the refs of the original insn and turn them into
2499*c87b03e5Sespie anti-refs. When df_refs_update finds these anti-refs, it annihilates
2500*c87b03e5Sespie the original refs. If validate_change fails then these anti-refs
2501*c87b03e5Sespie will just get ignored. */
2502*c87b03e5Sespie }
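
/* An illustrative sketch, not used anywhere in this file, of the
   incremental update protocol described above: change an insn in
   place, tell df about it, and reanalyse only the modified blocks by
   passing BLOCKS as -1 (see df_analyse).  NEW_PATTERN and FLAGS are
   hypothetical caller-supplied values.  */
#if 0
static void
example_change_insn (df, bb, insn, new_pattern, flags)
     struct df *df;
     basic_block bb;
     rtx insn;
     rtx new_pattern;
     int flags;
{
  PATTERN (insn) = new_pattern;
  INSN_CODE (insn) = -1;		/* Force re-recognition.  */

  df_insn_modify (df, bb, insn);
  df_analyse (df, (bitmap) -1, flags);
}
#endif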
2503*c87b03e5Sespie
2504*c87b03e5Sespie
2505*c87b03e5Sespie typedef struct replace_args {
2506*c87b03e5Sespie rtx match;
2507*c87b03e5Sespie rtx replacement;
2508*c87b03e5Sespie rtx insn;
2509*c87b03e5Sespie int modified;
2510*c87b03e5Sespie } replace_args;
2511*c87b03e5Sespie
2512*c87b03e5Sespie
2513*c87b03e5Sespie /* Replace mem pointed to by PX with its associated pseudo register.
2514*c87b03e5Sespie DATA is actually a pointer to a structure describing the
2515*c87b03e5Sespie instruction currently being scanned and the MEM we are currently
2516*c87b03e5Sespie replacing. */
2517*c87b03e5Sespie static int
2518*c87b03e5Sespie df_rtx_mem_replace (px, data)
2519*c87b03e5Sespie rtx *px;
2520*c87b03e5Sespie void *data;
2521*c87b03e5Sespie {
2522*c87b03e5Sespie replace_args *args = (replace_args *) data;
2523*c87b03e5Sespie rtx mem = *px;
2524*c87b03e5Sespie
2525*c87b03e5Sespie if (mem == NULL_RTX)
2526*c87b03e5Sespie return 0;
2527*c87b03e5Sespie
2528*c87b03e5Sespie switch (GET_CODE (mem))
2529*c87b03e5Sespie {
2530*c87b03e5Sespie case MEM:
2531*c87b03e5Sespie break;
2532*c87b03e5Sespie
2533*c87b03e5Sespie case CONST_DOUBLE:
2534*c87b03e5Sespie /* We're not interested in the MEM associated with a
2535*c87b03e5Sespie CONST_DOUBLE, so there's no need to traverse into one. */
2536*c87b03e5Sespie return -1;
2537*c87b03e5Sespie
2538*c87b03e5Sespie default:
2539*c87b03e5Sespie /* This is not a MEM. */
2540*c87b03e5Sespie return 0;
2541*c87b03e5Sespie }
2542*c87b03e5Sespie
2543*c87b03e5Sespie if (!rtx_equal_p (args->match, mem))
2544*c87b03e5Sespie /* This is not the MEM we are currently replacing. */
2545*c87b03e5Sespie return 0;
2546*c87b03e5Sespie
2547*c87b03e5Sespie /* Actually replace the MEM. */
2548*c87b03e5Sespie validate_change (args->insn, px, args->replacement, 1);
2549*c87b03e5Sespie args->modified++;
2550*c87b03e5Sespie
2551*c87b03e5Sespie return 0;
2552*c87b03e5Sespie }
2553*c87b03e5Sespie
2554*c87b03e5Sespie
2555*c87b03e5Sespie int
2556*c87b03e5Sespie df_insn_mem_replace (df, bb, insn, mem, reg)
2557*c87b03e5Sespie struct df *df;
2558*c87b03e5Sespie basic_block bb;
2559*c87b03e5Sespie rtx insn;
2560*c87b03e5Sespie rtx mem;
2561*c87b03e5Sespie rtx reg;
2562*c87b03e5Sespie {
2563*c87b03e5Sespie replace_args args;
2564*c87b03e5Sespie
2565*c87b03e5Sespie args.insn = insn;
2566*c87b03e5Sespie args.match = mem;
2567*c87b03e5Sespie args.replacement = reg;
2568*c87b03e5Sespie args.modified = 0;
2569*c87b03e5Sespie
2570*c87b03e5Sespie /* Search and replace all matching mems within insn. */
2571*c87b03e5Sespie for_each_rtx (&insn, df_rtx_mem_replace, &args);
2572*c87b03e5Sespie
2573*c87b03e5Sespie if (args.modified)
2574*c87b03e5Sespie df_insn_modify (df, bb, insn);
2575*c87b03e5Sespie
2576*c87b03e5Sespie /* ???? FIXME. We may have a new def or one or more new uses of REG
2577*c87b03e5Sespie in INSN. REG should be a new pseudo so it won't affect the
2578*c87b03e5Sespie dataflow information that we currently have. We should add
2579*c87b03e5Sespie the new uses and defs to INSN and then recreate the chains
2580*c87b03e5Sespie when df_analyse is called. */
2581*c87b03e5Sespie return args.modified;
2582*c87b03e5Sespie }
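
/* An illustrative sketch, not used anywhere in this file: using
   df_insn_mem_replace to replace MEM within INSN by a fresh pseudo.
   The replacements are queued by validate_change in group mode, so
   they only take effect once apply_change_group succeeds.  */
#if 0
static rtx
example_replace_mem_with_pseudo (df, bb, insn, mem)
     struct df *df;
     basic_block bb;
     rtx insn;
     rtx mem;
{
  rtx reg = gen_reg_rtx (GET_MODE (mem));

  df_insn_mem_replace (df, bb, insn, mem, reg);
  if (! apply_change_group ())
    return NULL_RTX;
  return reg;
}
#endif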
2583*c87b03e5Sespie
2584*c87b03e5Sespie
2585*c87b03e5Sespie /* Replace one register with another. Called through for_each_rtx; PX
2586*c87b03e5Sespie points to the rtx being scanned. DATA is actually a pointer to a
2587*c87b03e5Sespie structure of arguments. */
2588*c87b03e5Sespie static int
2589*c87b03e5Sespie df_rtx_reg_replace (px, data)
2590*c87b03e5Sespie rtx *px;
2591*c87b03e5Sespie void *data;
2592*c87b03e5Sespie {
2593*c87b03e5Sespie rtx x = *px;
2594*c87b03e5Sespie replace_args *args = (replace_args *) data;
2595*c87b03e5Sespie
2596*c87b03e5Sespie if (x == NULL_RTX)
2597*c87b03e5Sespie return 0;
2598*c87b03e5Sespie
2599*c87b03e5Sespie if (x == args->match)
2600*c87b03e5Sespie {
2601*c87b03e5Sespie validate_change (args->insn, px, args->replacement, 1);
2602*c87b03e5Sespie args->modified++;
2603*c87b03e5Sespie }
2604*c87b03e5Sespie
2605*c87b03e5Sespie return 0;
2606*c87b03e5Sespie }
2607*c87b03e5Sespie
2608*c87b03e5Sespie
2609*c87b03e5Sespie /* Replace the reg within every ref on CHAIN that is within the set
2610*c87b03e5Sespie BLOCKS of basic blocks with NEWREG. Also update the regs within
2611*c87b03e5Sespie REG_NOTES. */
2612*c87b03e5Sespie void
2613*c87b03e5Sespie df_refs_reg_replace (df, blocks, chain, oldreg, newreg)
2614*c87b03e5Sespie struct df *df;
2615*c87b03e5Sespie bitmap blocks;
2616*c87b03e5Sespie struct df_link *chain;
2617*c87b03e5Sespie rtx oldreg;
2618*c87b03e5Sespie rtx newreg;
2619*c87b03e5Sespie {
2620*c87b03e5Sespie struct df_link *link;
2621*c87b03e5Sespie replace_args args;
2622*c87b03e5Sespie
2623*c87b03e5Sespie if (! blocks)
2624*c87b03e5Sespie blocks = df->all_blocks;
2625*c87b03e5Sespie
2626*c87b03e5Sespie args.match = oldreg;
2627*c87b03e5Sespie args.replacement = newreg;
2628*c87b03e5Sespie args.modified = 0;
2629*c87b03e5Sespie
2630*c87b03e5Sespie for (link = chain; link; link = link->next)
2631*c87b03e5Sespie {
2632*c87b03e5Sespie struct ref *ref = link->ref;
2633*c87b03e5Sespie rtx insn = DF_REF_INSN (ref);
2634*c87b03e5Sespie
2635*c87b03e5Sespie if (! INSN_P (insn))
2636*c87b03e5Sespie continue;
2637*c87b03e5Sespie
2638*c87b03e5Sespie if (bitmap_bit_p (blocks, DF_REF_BBNO (ref)))
2639*c87b03e5Sespie {
2640*c87b03e5Sespie df_ref_reg_replace (df, ref, oldreg, newreg);
2641*c87b03e5Sespie
2642*c87b03e5Sespie /* Replace occurrences of the reg within the REG_NOTES. */
2643*c87b03e5Sespie if ((! link->next || DF_REF_INSN (ref)
2644*c87b03e5Sespie != DF_REF_INSN (link->next->ref))
2645*c87b03e5Sespie && REG_NOTES (insn))
2646*c87b03e5Sespie {
2647*c87b03e5Sespie args.insn = insn;
2648*c87b03e5Sespie for_each_rtx (&REG_NOTES (insn), df_rtx_reg_replace, &args);
2649*c87b03e5Sespie }
2650*c87b03e5Sespie }
2651*c87b03e5Sespie else
2652*c87b03e5Sespie {
2653*c87b03e5Sespie /* Temporary check to ensure that we have a grip on which
2654*c87b03e5Sespie regs should be replaced. */
2655*c87b03e5Sespie abort ();
2656*c87b03e5Sespie }
2657*c87b03e5Sespie }
2658*c87b03e5Sespie }
2659*c87b03e5Sespie
2660*c87b03e5Sespie
2661*c87b03e5Sespie /* Replace all occurrences of register OLDREG with register NEWREG in
2662*c87b03e5Sespie blocks defined by bitmap BLOCKS. This also replaces occurrences of
2663*c87b03e5Sespie OLDREG in the REG_NOTES but only for insns containing OLDREG. This
2664*c87b03e5Sespie routine expects the reg-use and reg-def chains to be valid. */
2665*c87b03e5Sespie int
2666*c87b03e5Sespie df_reg_replace (df, blocks, oldreg, newreg)
2667*c87b03e5Sespie struct df *df;
2668*c87b03e5Sespie bitmap blocks;
2669*c87b03e5Sespie rtx oldreg;
2670*c87b03e5Sespie rtx newreg;
2671*c87b03e5Sespie {
2672*c87b03e5Sespie unsigned int oldregno = REGNO (oldreg);
2673*c87b03e5Sespie
2674*c87b03e5Sespie df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
2675*c87b03e5Sespie df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
2676*c87b03e5Sespie return 1;
2677*c87b03e5Sespie }
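/* Usage sketch (illustrative only): renaming OLD_REG to a new pseudo
   across a set of blocks.  WEB_BLOCKS and OLD_REG are hypothetical
   placeholders; the reg-def and reg-use chains must be valid, so a prior
   df_analyse requesting DF_RD_CHAIN | DF_RU_CHAIN (or DF_ALL) is assumed.
   The queued changes presumably still need to be committed with
   apply_change_group.

     rtx new_reg = gen_reg_rtx (GET_MODE (old_reg));

     df_reg_replace (df, web_blocks, old_reg, new_reg);
     if (! apply_change_group ())
       abort ();
*/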
2678*c87b03e5Sespie
2679*c87b03e5Sespie
2680*c87b03e5Sespie /* Try replacing the reg within REF with NEWREG. Do not modify
2681*c87b03e5Sespie def-use/use-def chains. */
2682*c87b03e5Sespie int
2683*c87b03e5Sespie df_ref_reg_replace (df, ref, oldreg, newreg)
2684*c87b03e5Sespie struct df *df;
2685*c87b03e5Sespie struct ref *ref;
2686*c87b03e5Sespie rtx oldreg;
2687*c87b03e5Sespie rtx newreg;
2688*c87b03e5Sespie {
2689*c87b03e5Sespie /* Check if the insn has been deleted by being converted into a NOTE.
2690*c87b03e5Sespie If so, ignore this insn. */
2691*c87b03e5Sespie if (! INSN_P (DF_REF_INSN (ref)))
2692*c87b03e5Sespie return 0;
2693*c87b03e5Sespie
2694*c87b03e5Sespie if (oldreg && oldreg != DF_REF_REG (ref))
2695*c87b03e5Sespie abort ();
2696*c87b03e5Sespie
2697*c87b03e5Sespie if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
2698*c87b03e5Sespie return 0;
2699*c87b03e5Sespie
2700*c87b03e5Sespie df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
2701*c87b03e5Sespie return 1;
2702*c87b03e5Sespie }
2703*c87b03e5Sespie
2704*c87b03e5Sespie
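/* Remove the use of register REGNO in USE_INSN and move the def of REGNO
   from DEF_INSN onto USE_INSN's def chain within BB.  Return the shifted
   def, or zero if the def or the use cannot be found.  */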
2705*c87b03e5Sespie struct ref*
2706*c87b03e5Sespie df_bb_def_use_swap (df, bb, def_insn, use_insn, regno)
2707*c87b03e5Sespie struct df * df;
2708*c87b03e5Sespie basic_block bb;
2709*c87b03e5Sespie rtx def_insn;
2710*c87b03e5Sespie rtx use_insn;
2711*c87b03e5Sespie unsigned int regno;
2712*c87b03e5Sespie {
2713*c87b03e5Sespie struct ref *def;
2714*c87b03e5Sespie struct ref *use;
2715*c87b03e5Sespie int def_uid;
2716*c87b03e5Sespie int use_uid;
2717*c87b03e5Sespie struct df_link *link;
2718*c87b03e5Sespie
2719*c87b03e5Sespie def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
2720*c87b03e5Sespie if (! def)
2721*c87b03e5Sespie return 0;
2722*c87b03e5Sespie
2723*c87b03e5Sespie use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
2724*c87b03e5Sespie if (! use)
2725*c87b03e5Sespie return 0;
2726*c87b03e5Sespie
2727*c87b03e5Sespie /* The USE no longer exists. */
2728*c87b03e5Sespie use_uid = INSN_UID (use_insn);
2729*c87b03e5Sespie df_use_unlink (df, use);
2730*c87b03e5Sespie df_ref_unlink (&df->insns[use_uid].uses, use);
2731*c87b03e5Sespie
2732*c87b03e5Sespie /* The DEF requires shifting so remove it from DEF_INSN
2733*c87b03e5Sespie and add it to USE_INSN by reusing LINK. */
2734*c87b03e5Sespie def_uid = INSN_UID (def_insn);
2735*c87b03e5Sespie link = df_ref_unlink (&df->insns[def_uid].defs, def);
2736*c87b03e5Sespie link->ref = def;
2737*c87b03e5Sespie link->next = df->insns[use_uid].defs;
2738*c87b03e5Sespie df->insns[use_uid].defs = link;
2739*c87b03e5Sespie
2740*c87b03e5Sespie #if 0
2741*c87b03e5Sespie link = df_ref_unlink (&df->regs[regno].defs, def);
2742*c87b03e5Sespie link->ref = def;
2743*c87b03e5Sespie link->next = df->regs[regno].defs;
2744*c87b03e5Sespie df->regs[regno].defs = link;
2745*c87b03e5Sespie #endif
2746*c87b03e5Sespie
2747*c87b03e5Sespie DF_REF_INSN (def) = use_insn;
2748*c87b03e5Sespie return def;
2749*c87b03e5Sespie }
2750*c87b03e5Sespie
2751*c87b03e5Sespie
2752*c87b03e5Sespie /* Mark the insns from FIRST_INSN to LAST_INSN (inclusive) as modified so
2753*c87b03e5Sespie that their dataflow info is recomputed. All new insns must be processed by this routine. */
2754*c87b03e5Sespie static void
2755*c87b03e5Sespie df_insns_modify (df, bb, first_insn, last_insn)
2756*c87b03e5Sespie struct df *df;
2757*c87b03e5Sespie basic_block bb;
2758*c87b03e5Sespie rtx first_insn;
2759*c87b03e5Sespie rtx last_insn;
2760*c87b03e5Sespie {
2761*c87b03e5Sespie rtx insn;
2762*c87b03e5Sespie
2763*c87b03e5Sespie for (insn = first_insn; ; insn = NEXT_INSN (insn))
2764*c87b03e5Sespie {
2765*c87b03e5Sespie unsigned int uid;
2766*c87b03e5Sespie
2767*c87b03e5Sespie /* A non-const call should not have slipped through the net. If
2768*c87b03e5Sespie it does, we need to create a new basic block. Ouch. The
2769*c87b03e5Sespie same applies for a label. */
2770*c87b03e5Sespie if ((GET_CODE (insn) == CALL_INSN
2771*c87b03e5Sespie && ! CONST_OR_PURE_CALL_P (insn))
2772*c87b03e5Sespie || GET_CODE (insn) == CODE_LABEL)
2773*c87b03e5Sespie abort ();
2774*c87b03e5Sespie
2775*c87b03e5Sespie uid = INSN_UID (insn);
2776*c87b03e5Sespie
2777*c87b03e5Sespie if (uid >= df->insn_size)
2778*c87b03e5Sespie df_insn_table_realloc (df, uid);
2779*c87b03e5Sespie
2780*c87b03e5Sespie df_insn_modify (df, bb, insn);
2781*c87b03e5Sespie
2782*c87b03e5Sespie if (insn == last_insn)
2783*c87b03e5Sespie break;
2784*c87b03e5Sespie }
2785*c87b03e5Sespie }
2786*c87b03e5Sespie
2787*c87b03e5Sespie
2788*c87b03e5Sespie /* Emit PATTERN before INSN within BB. */
2789*c87b03e5Sespie rtx
2790*c87b03e5Sespie df_pattern_emit_before (df, pattern, bb, insn)
2791*c87b03e5Sespie struct df *df ATTRIBUTE_UNUSED;
2792*c87b03e5Sespie rtx pattern;
2793*c87b03e5Sespie basic_block bb;
2794*c87b03e5Sespie rtx insn;
2795*c87b03e5Sespie {
2796*c87b03e5Sespie rtx ret_insn;
2797*c87b03e5Sespie rtx prev_insn = PREV_INSN (insn);
2798*c87b03e5Sespie
2799*c87b03e5Sespie /* We should not be inserting before the start of the block. */
2800*c87b03e5Sespie if (insn == bb->head)
2801*c87b03e5Sespie abort ();
2802*c87b03e5Sespie ret_insn = emit_insn_before (pattern, insn);
2803*c87b03e5Sespie if (ret_insn == insn)
2804*c87b03e5Sespie return ret_insn;
2805*c87b03e5Sespie
2806*c87b03e5Sespie df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
2807*c87b03e5Sespie return ret_insn;
2808*c87b03e5Sespie }
2809*c87b03e5Sespie
2810*c87b03e5Sespie
2811*c87b03e5Sespie /* Emit PATTERN after INSN within BB. */
2812*c87b03e5Sespie rtx
2813*c87b03e5Sespie df_pattern_emit_after (df, pattern, bb, insn)
2814*c87b03e5Sespie struct df *df;
2815*c87b03e5Sespie rtx pattern;
2816*c87b03e5Sespie basic_block bb;
2817*c87b03e5Sespie rtx insn;
2818*c87b03e5Sespie {
2819*c87b03e5Sespie rtx ret_insn;
2820*c87b03e5Sespie
2821*c87b03e5Sespie ret_insn = emit_insn_after (pattern, insn);
2822*c87b03e5Sespie if (ret_insn == insn)
2823*c87b03e5Sespie return ret_insn;
2824*c87b03e5Sespie
2825*c87b03e5Sespie df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2826*c87b03e5Sespie return ret_insn;
2827*c87b03e5Sespie }
2828*c87b03e5Sespie
2829*c87b03e5Sespie
2830*c87b03e5Sespie /* Emit jump PATTERN after INSN within BB. */
2831*c87b03e5Sespie rtx
2832*c87b03e5Sespie df_jump_pattern_emit_after (df, pattern, bb, insn)
2833*c87b03e5Sespie struct df *df;
2834*c87b03e5Sespie rtx pattern;
2835*c87b03e5Sespie basic_block bb;
2836*c87b03e5Sespie rtx insn;
2837*c87b03e5Sespie {
2838*c87b03e5Sespie rtx ret_insn;
2839*c87b03e5Sespie
2840*c87b03e5Sespie ret_insn = emit_jump_insn_after (pattern, insn);
2841*c87b03e5Sespie if (ret_insn == insn)
2842*c87b03e5Sespie return ret_insn;
2843*c87b03e5Sespie
2844*c87b03e5Sespie df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2845*c87b03e5Sespie return ret_insn;
2846*c87b03e5Sespie }
2847*c87b03e5Sespie
2848*c87b03e5Sespie
2849*c87b03e5Sespie /* Move INSN within BB before BEFORE_INSN within BEFORE_BB.
2850*c87b03e5Sespie
2851*c87b03e5Sespie This function should only be used to move loop invariant insns
2852*c87b03e5Sespie out of a loop where it has been proven that the def-use info
2853*c87b03e5Sespie will still be valid. */
2854*c87b03e5Sespie rtx
2855*c87b03e5Sespie df_insn_move_before (df, bb, insn, before_bb, before_insn)
2856*c87b03e5Sespie struct df *df;
2857*c87b03e5Sespie basic_block bb;
2858*c87b03e5Sespie rtx insn;
2859*c87b03e5Sespie basic_block before_bb;
2860*c87b03e5Sespie rtx before_insn;
2861*c87b03e5Sespie {
2862*c87b03e5Sespie struct df_link *link;
2863*c87b03e5Sespie unsigned int uid;
2864*c87b03e5Sespie
2865*c87b03e5Sespie if (! bb)
2866*c87b03e5Sespie return df_pattern_emit_before (df, insn, before_bb, before_insn);
2867*c87b03e5Sespie
2868*c87b03e5Sespie uid = INSN_UID (insn);
2869*c87b03e5Sespie
2870*c87b03e5Sespie /* Change bb for all df defined and used by this insn. */
2871*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
2872*c87b03e5Sespie DF_REF_BB (link->ref) = before_bb;
2873*c87b03e5Sespie for (link = df->insns[uid].uses; link; link = link->next)
2874*c87b03e5Sespie DF_REF_BB (link->ref) = before_bb;
2875*c87b03e5Sespie
2876*c87b03e5Sespie /* The lifetimes of the registers used in this insn will be reduced
2877*c87b03e5Sespie while the lifetimes of the registers defined in this insn
2878*c87b03e5Sespie are likely to be increased. */
2879*c87b03e5Sespie
2880*c87b03e5Sespie /* ???? Perhaps all the insns moved should be stored on a list
2881*c87b03e5Sespie which df_analyse removes when it recalculates data flow. */
2882*c87b03e5Sespie
2883*c87b03e5Sespie return emit_insn_before (insn, before_insn);
2884*c87b03e5Sespie }
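/* Illustrative sketch (not from the original sources): hoisting a
   loop-invariant insn to just before the end of the loop preheader.
   PREHEADER is a hypothetical basic block; df_insn_dominates_all_uses_p
   below is one plausible safety check a caller could apply first.

     if (df_insn_dominates_all_uses_p (df, bb, insn))
       df_insn_move_before (df, bb, insn, preheader, preheader->end);
*/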
2885*c87b03e5Sespie
2886*c87b03e5Sespie /* Functions to query dataflow information. */
2887*c87b03e5Sespie
2888*c87b03e5Sespie
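/* Return nonzero if INSN contains a def of register REGNO.  */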
2889*c87b03e5Sespie int
2890*c87b03e5Sespie df_insn_regno_def_p (df, bb, insn, regno)
2891*c87b03e5Sespie struct df *df;
2892*c87b03e5Sespie basic_block bb ATTRIBUTE_UNUSED;
2893*c87b03e5Sespie rtx insn;
2894*c87b03e5Sespie unsigned int regno;
2895*c87b03e5Sespie {
2896*c87b03e5Sespie unsigned int uid;
2897*c87b03e5Sespie struct df_link *link;
2898*c87b03e5Sespie
2899*c87b03e5Sespie uid = INSN_UID (insn);
2900*c87b03e5Sespie
2901*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
2902*c87b03e5Sespie {
2903*c87b03e5Sespie struct ref *def = link->ref;
2904*c87b03e5Sespie
2905*c87b03e5Sespie if (DF_REF_REGNO (def) == regno)
2906*c87b03e5Sespie return 1;
2907*c87b03e5Sespie }
2908*c87b03e5Sespie
2909*c87b03e5Sespie return 0;
2910*c87b03e5Sespie }
2911*c87b03e5Sespie
2912*c87b03e5Sespie
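/* Return nonzero if DEF is the only def that reaches each of the uses
   on its def-use chain.  */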
2913*c87b03e5Sespie static int
2914*c87b03e5Sespie df_def_dominates_all_uses_p (df, def)
2915*c87b03e5Sespie struct df *df ATTRIBUTE_UNUSED;
2916*c87b03e5Sespie struct ref *def;
2917*c87b03e5Sespie {
2918*c87b03e5Sespie struct df_link *du_link;
2919*c87b03e5Sespie
2920*c87b03e5Sespie /* Follow def-use chain to find all the uses of this def. */
2921*c87b03e5Sespie for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
2922*c87b03e5Sespie {
2923*c87b03e5Sespie struct ref *use = du_link->ref;
2924*c87b03e5Sespie struct df_link *ud_link;
2925*c87b03e5Sespie
2926*c87b03e5Sespie /* Follow use-def chain to check all the defs for this use. */
2927*c87b03e5Sespie for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
2928*c87b03e5Sespie if (ud_link->ref != def)
2929*c87b03e5Sespie return 0;
2930*c87b03e5Sespie }
2931*c87b03e5Sespie return 1;
2932*c87b03e5Sespie }
2933*c87b03e5Sespie
2934*c87b03e5Sespie
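/* Return nonzero if every def in INSN is the only def reaching each of
   its uses.  */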
2935*c87b03e5Sespie int
2936*c87b03e5Sespie df_insn_dominates_all_uses_p (df, bb, insn)
2937*c87b03e5Sespie struct df *df;
2938*c87b03e5Sespie basic_block bb ATTRIBUTE_UNUSED;
2939*c87b03e5Sespie rtx insn;
2940*c87b03e5Sespie {
2941*c87b03e5Sespie unsigned int uid;
2942*c87b03e5Sespie struct df_link *link;
2943*c87b03e5Sespie
2944*c87b03e5Sespie uid = INSN_UID (insn);
2945*c87b03e5Sespie
2946*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
2947*c87b03e5Sespie {
2948*c87b03e5Sespie struct ref *def = link->ref;
2949*c87b03e5Sespie
2950*c87b03e5Sespie if (! df_def_dominates_all_uses_p (df, def))
2951*c87b03e5Sespie return 0;
2952*c87b03e5Sespie }
2953*c87b03e5Sespie
2954*c87b03e5Sespie return 1;
2955*c87b03e5Sespie }
2956*c87b03e5Sespie
2957*c87b03e5Sespie
2958*c87b03e5Sespie /* Return nonzero if DEF dominates all its uses within the basic blocks
2959*c87b03e5Sespie in the bitmap BLOCKS. */
2960*c87b03e5Sespie static int
2961*c87b03e5Sespie df_def_dominates_uses_p (df, def, blocks)
2962*c87b03e5Sespie struct df *df ATTRIBUTE_UNUSED;
2963*c87b03e5Sespie struct ref *def;
2964*c87b03e5Sespie bitmap blocks;
2965*c87b03e5Sespie {
2966*c87b03e5Sespie struct df_link *du_link;
2967*c87b03e5Sespie
2968*c87b03e5Sespie /* Follow def-use chain to find all the uses of this def. */
2969*c87b03e5Sespie for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
2970*c87b03e5Sespie {
2971*c87b03e5Sespie struct ref *use = du_link->ref;
2972*c87b03e5Sespie struct df_link *ud_link;
2973*c87b03e5Sespie
2974*c87b03e5Sespie /* Only worry about the uses within BLOCKS. For example,
2975*c87b03e5Sespie consider a register defined within a loop that is live at the
2976*c87b03e5Sespie loop exits. */
2977*c87b03e5Sespie if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
2978*c87b03e5Sespie {
2979*c87b03e5Sespie /* Follow use-def chain to check all the defs for this use. */
2980*c87b03e5Sespie for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
2981*c87b03e5Sespie if (ud_link->ref != def)
2982*c87b03e5Sespie return 0;
2983*c87b03e5Sespie }
2984*c87b03e5Sespie }
2985*c87b03e5Sespie return 1;
2986*c87b03e5Sespie }
2987*c87b03e5Sespie
2988*c87b03e5Sespie
2989*c87b03e5Sespie /* Return nonzero if all the defs of INSN within BB dominate all their
2990*c87b03e5Sespie corresponding uses within the blocks in the bitmap BLOCKS. */
2991*c87b03e5Sespie int
2992*c87b03e5Sespie df_insn_dominates_uses_p (df, bb, insn, blocks)
2993*c87b03e5Sespie struct df *df;
2994*c87b03e5Sespie basic_block bb ATTRIBUTE_UNUSED;
2995*c87b03e5Sespie rtx insn;
2996*c87b03e5Sespie bitmap blocks;
2997*c87b03e5Sespie {
2998*c87b03e5Sespie unsigned int uid;
2999*c87b03e5Sespie struct df_link *link;
3000*c87b03e5Sespie
3001*c87b03e5Sespie uid = INSN_UID (insn);
3002*c87b03e5Sespie
3003*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
3004*c87b03e5Sespie {
3005*c87b03e5Sespie struct ref *def = link->ref;
3006*c87b03e5Sespie
3007*c87b03e5Sespie /* Only consider the defs within BLOCKS. */
3008*c87b03e5Sespie if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
3009*c87b03e5Sespie && ! df_def_dominates_uses_p (df, def, blocks))
3010*c87b03e5Sespie return 0;
3011*c87b03e5Sespie }
3012*c87b03e5Sespie return 1;
3013*c87b03e5Sespie }
3014*c87b03e5Sespie
3015*c87b03e5Sespie
3016*c87b03e5Sespie /* Return the basic block that REG is referenced in, or NULL if it is
3017*c87b03e5Sespie referenced in multiple basic blocks. */
3018*c87b03e5Sespie basic_block
3019*c87b03e5Sespie df_regno_bb (df, regno)
3020*c87b03e5Sespie struct df *df;
3021*c87b03e5Sespie unsigned int regno;
3022*c87b03e5Sespie {
3023*c87b03e5Sespie struct df_link *defs = df->regs[regno].defs;
3024*c87b03e5Sespie struct df_link *uses = df->regs[regno].uses;
3025*c87b03e5Sespie struct ref *def = defs ? defs->ref : 0;
3026*c87b03e5Sespie struct ref *use = uses ? uses->ref : 0;
3027*c87b03e5Sespie basic_block bb_def = def ? DF_REF_BB (def) : 0;
3028*c87b03e5Sespie basic_block bb_use = use ? DF_REF_BB (use) : 0;
3029*c87b03e5Sespie
3030*c87b03e5Sespie /* Compare blocks of first def and last use. ???? FIXME. What if
3031*c87b03e5Sespie the reg-def and reg-use lists are not correctly ordered. */
3032*c87b03e5Sespie return bb_def == bb_use ? bb_def : 0;
3033*c87b03e5Sespie }
3034*c87b03e5Sespie
3035*c87b03e5Sespie
3036*c87b03e5Sespie /* Return nonzero if REG is used in multiple basic blocks. */
3037*c87b03e5Sespie int
3038*c87b03e5Sespie df_reg_global_p (df, reg)
3039*c87b03e5Sespie struct df *df;
3040*c87b03e5Sespie rtx reg;
3041*c87b03e5Sespie {
3042*c87b03e5Sespie return df_regno_bb (df, REGNO (reg)) != 0;
3043*c87b03e5Sespie }
3044*c87b03e5Sespie
3045*c87b03e5Sespie
3046*c87b03e5Sespie /* Return total lifetime (in insns) of REG. */
3047*c87b03e5Sespie int
3048*c87b03e5Sespie df_reg_lifetime (df, reg)
3049*c87b03e5Sespie struct df *df;
3050*c87b03e5Sespie rtx reg;
3051*c87b03e5Sespie {
3052*c87b03e5Sespie return df->regs[REGNO (reg)].lifetime;
3053*c87b03e5Sespie }
3054*c87b03e5Sespie
3055*c87b03e5Sespie
3056*c87b03e5Sespie /* Return nonzero if REG live at start of BB. */
3057*c87b03e5Sespie int
3058*c87b03e5Sespie df_bb_reg_live_start_p (df, bb, reg)
3059*c87b03e5Sespie struct df *df ATTRIBUTE_UNUSED;
3060*c87b03e5Sespie basic_block bb;
3061*c87b03e5Sespie rtx reg;
3062*c87b03e5Sespie {
3063*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
3064*c87b03e5Sespie
3065*c87b03e5Sespie #ifdef ENABLE_CHECKING
3066*c87b03e5Sespie if (! bb_info->lr_in)
3067*c87b03e5Sespie abort ();
3068*c87b03e5Sespie #endif
3069*c87b03e5Sespie
3070*c87b03e5Sespie return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
3071*c87b03e5Sespie }
3072*c87b03e5Sespie
3073*c87b03e5Sespie
3074*c87b03e5Sespie /* Return nonzero if REG live at end of BB. */
3075*c87b03e5Sespie int
3076*c87b03e5Sespie df_bb_reg_live_end_p (df, bb, reg)
3077*c87b03e5Sespie struct df *df ATTRIBUTE_UNUSED;
3078*c87b03e5Sespie basic_block bb;
3079*c87b03e5Sespie rtx reg;
3080*c87b03e5Sespie {
3081*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
3082*c87b03e5Sespie
3083*c87b03e5Sespie #ifdef ENABLE_CHECKING
3084*c87b03e5Sespie if (! bb_info->lr_out)
3085*c87b03e5Sespie abort ();
3086*c87b03e5Sespie #endif
3087*c87b03e5Sespie
3088*c87b03e5Sespie return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
3089*c87b03e5Sespie }
3090*c87b03e5Sespie
3091*c87b03e5Sespie
3092*c87b03e5Sespie /* Return -1 if the life of REG1 ends before the life of REG2 starts, 1 if
3093*c87b03e5Sespie the life of REG1 starts after the life of REG2 ends, or 0 if their lives overlap. */
3094*c87b03e5Sespie int
3095*c87b03e5Sespie df_bb_regs_lives_compare (df, bb, reg1, reg2)
3096*c87b03e5Sespie struct df *df;
3097*c87b03e5Sespie basic_block bb;
3098*c87b03e5Sespie rtx reg1;
3099*c87b03e5Sespie rtx reg2;
3100*c87b03e5Sespie {
3101*c87b03e5Sespie unsigned int regno1 = REGNO (reg1);
3102*c87b03e5Sespie unsigned int regno2 = REGNO (reg2);
3103*c87b03e5Sespie struct ref *def1;
3104*c87b03e5Sespie struct ref *use1;
3105*c87b03e5Sespie struct ref *def2;
3106*c87b03e5Sespie struct ref *use2;
3107*c87b03e5Sespie
3108*c87b03e5Sespie
3109*c87b03e5Sespie /* The regs must be local to BB. */
3110*c87b03e5Sespie if (df_regno_bb (df, regno1) != bb
3111*c87b03e5Sespie || df_regno_bb (df, regno2) != bb)
3112*c87b03e5Sespie abort ();
3113*c87b03e5Sespie
3114*c87b03e5Sespie def2 = df_bb_regno_first_def_find (df, bb, regno2);
3115*c87b03e5Sespie use1 = df_bb_regno_last_use_find (df, bb, regno1);
3116*c87b03e5Sespie
3117*c87b03e5Sespie if (DF_INSN_LUID (df, DF_REF_INSN (def2))
3118*c87b03e5Sespie > DF_INSN_LUID (df, DF_REF_INSN (use1)))
3119*c87b03e5Sespie return -1;
3120*c87b03e5Sespie
3121*c87b03e5Sespie def1 = df_bb_regno_first_def_find (df, bb, regno1);
3122*c87b03e5Sespie use2 = df_bb_regno_last_use_find (df, bb, regno2);
3123*c87b03e5Sespie
3124*c87b03e5Sespie if (DF_INSN_LUID (df, DF_REF_INSN (def1))
3125*c87b03e5Sespie > DF_INSN_LUID (df, DF_REF_INSN (use2)))
3126*c87b03e5Sespie return 1;
3127*c87b03e5Sespie
3128*c87b03e5Sespie return 0;
3129*c87b03e5Sespie }
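/* For example (numbers invented for illustration): if the last use of
   REG1 is at the insn with luid 4 and the first def of REG2 is at the
   insn with luid 7, the life of REG1 ends before that of REG2 begins and
   the function returns -1.  */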
3130*c87b03e5Sespie
3131*c87b03e5Sespie
3132*c87b03e5Sespie /* Return last use of REGNO within BB. */
3133*c87b03e5Sespie static struct ref *
3134*c87b03e5Sespie df_bb_regno_last_use_find (df, bb, regno)
3135*c87b03e5Sespie struct df * df;
3136*c87b03e5Sespie basic_block bb ATTRIBUTE_UNUSED;
3137*c87b03e5Sespie unsigned int regno;
3138*c87b03e5Sespie {
3139*c87b03e5Sespie struct df_link *link;
3140*c87b03e5Sespie
3141*c87b03e5Sespie /* This assumes that the reg-use list is ordered such that for any
3142*c87b03e5Sespie BB, the last use is found first. However, since the BBs are not
3143*c87b03e5Sespie ordered, the first use in the chain is not necessarily the last
3144*c87b03e5Sespie use in the function. */
3145*c87b03e5Sespie for (link = df->regs[regno].uses; link; link = link->next)
3146*c87b03e5Sespie {
3147*c87b03e5Sespie struct ref *use = link->ref;
3148*c87b03e5Sespie
3149*c87b03e5Sespie if (DF_REF_BB (use) == bb)
3150*c87b03e5Sespie return use;
3151*c87b03e5Sespie }
3152*c87b03e5Sespie return 0;
3153*c87b03e5Sespie }
3154*c87b03e5Sespie
3155*c87b03e5Sespie
3156*c87b03e5Sespie /* Return first def of REGNO within BB. */
3157*c87b03e5Sespie static struct ref *
3158*c87b03e5Sespie df_bb_regno_first_def_find (df, bb, regno)
3159*c87b03e5Sespie struct df * df;
3160*c87b03e5Sespie basic_block bb ATTRIBUTE_UNUSED;
3161*c87b03e5Sespie unsigned int regno;
3162*c87b03e5Sespie {
3163*c87b03e5Sespie struct df_link *link;
3164*c87b03e5Sespie
3165*c87b03e5Sespie /* This assumes that the reg-def list is ordered such that for any
3166*c87b03e5Sespie BB, the first def is found first. However, since the BBs are not
3167*c87b03e5Sespie ordered, the first def in the chain is not necessarily the first
3168*c87b03e5Sespie def in the function. */
3169*c87b03e5Sespie for (link = df->regs[regno].defs; link; link = link->next)
3170*c87b03e5Sespie {
3171*c87b03e5Sespie struct ref *def = link->ref;
3172*c87b03e5Sespie
3173*c87b03e5Sespie if (DF_REF_BB (def) == bb)
3174*c87b03e5Sespie return def;
3175*c87b03e5Sespie }
3176*c87b03e5Sespie return 0;
3177*c87b03e5Sespie }
3178*c87b03e5Sespie
3179*c87b03e5Sespie
3180*c87b03e5Sespie /* Return the last use of REGNO inside INSN within BB. */
3181*c87b03e5Sespie static struct ref *
3182*c87b03e5Sespie df_bb_insn_regno_last_use_find (df, bb, insn, regno)
3183*c87b03e5Sespie struct df * df;
3184*c87b03e5Sespie basic_block bb ATTRIBUTE_UNUSED;
3185*c87b03e5Sespie rtx insn;
3186*c87b03e5Sespie unsigned int regno;
3187*c87b03e5Sespie {
3188*c87b03e5Sespie unsigned int uid;
3189*c87b03e5Sespie struct df_link *link;
3190*c87b03e5Sespie
3191*c87b03e5Sespie uid = INSN_UID (insn);
3192*c87b03e5Sespie
3193*c87b03e5Sespie for (link = df->insns[uid].uses; link; link = link->next)
3194*c87b03e5Sespie {
3195*c87b03e5Sespie struct ref *use = link->ref;
3196*c87b03e5Sespie
3197*c87b03e5Sespie if (DF_REF_REGNO (use) == regno)
3198*c87b03e5Sespie return use;
3199*c87b03e5Sespie }
3200*c87b03e5Sespie
3201*c87b03e5Sespie return 0;
3202*c87b03e5Sespie }
3203*c87b03e5Sespie
3204*c87b03e5Sespie
3205*c87b03e5Sespie /* Return first def of REGNO inside INSN within BB. */
3206*c87b03e5Sespie static struct ref *
3207*c87b03e5Sespie df_bb_insn_regno_first_def_find (df, bb, insn, regno)
3208*c87b03e5Sespie struct df * df;
3209*c87b03e5Sespie basic_block bb ATTRIBUTE_UNUSED;
3210*c87b03e5Sespie rtx insn;
3211*c87b03e5Sespie unsigned int regno;
3212*c87b03e5Sespie {
3213*c87b03e5Sespie unsigned int uid;
3214*c87b03e5Sespie struct df_link *link;
3215*c87b03e5Sespie
3216*c87b03e5Sespie uid = INSN_UID (insn);
3217*c87b03e5Sespie
3218*c87b03e5Sespie for (link = df->insns[uid].defs; link; link = link->next)
3219*c87b03e5Sespie {
3220*c87b03e5Sespie struct ref *def = link->ref;
3221*c87b03e5Sespie
3222*c87b03e5Sespie if (DF_REF_REGNO (def) == regno)
3223*c87b03e5Sespie return def;
3224*c87b03e5Sespie }
3225*c87b03e5Sespie
3226*c87b03e5Sespie return 0;
3227*c87b03e5Sespie }
3228*c87b03e5Sespie
3229*c87b03e5Sespie
3230*c87b03e5Sespie /* Return the insn that uses REG if the def of REG in INSN has exactly
3231*c87b03e5Sespie one use; otherwise return NULL_RTX. */
3232*c87b03e5Sespie rtx
3233*c87b03e5Sespie df_bb_single_def_use_insn_find (df, bb, insn, reg)
3234*c87b03e5Sespie struct df * df;
3235*c87b03e5Sespie basic_block bb;
3236*c87b03e5Sespie rtx insn;
3237*c87b03e5Sespie rtx reg;
3238*c87b03e5Sespie {
3239*c87b03e5Sespie struct ref *def;
3240*c87b03e5Sespie struct ref *use;
3241*c87b03e5Sespie struct df_link *du_link;
3242*c87b03e5Sespie
3243*c87b03e5Sespie def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));
3244*c87b03e5Sespie
3245*c87b03e5Sespie if (! def)
3246*c87b03e5Sespie abort ();
3247*c87b03e5Sespie
3248*c87b03e5Sespie du_link = DF_REF_CHAIN (def);
3249*c87b03e5Sespie
3250*c87b03e5Sespie if (! du_link)
3251*c87b03e5Sespie return NULL_RTX;
3252*c87b03e5Sespie
3253*c87b03e5Sespie use = du_link->ref;
3254*c87b03e5Sespie
3255*c87b03e5Sespie /* Check if def is dead. */
3256*c87b03e5Sespie if (! use)
3257*c87b03e5Sespie return NULL_RTX;
3258*c87b03e5Sespie
3259*c87b03e5Sespie /* Check for multiple uses. */
3260*c87b03e5Sespie if (du_link->next)
3261*c87b03e5Sespie return NULL_RTX;
3262*c87b03e5Sespie
3263*c87b03e5Sespie return DF_REF_INSN (use);
3264*c87b03e5Sespie }
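/* Usage sketch (illustrative): checking that the value set by INSN is
   consumed by exactly one insn, e.g. as a guard before forward
   substitution.  DST_REG names the register set by INSN and is a
   placeholder.

     rtx use_insn = df_bb_single_def_use_insn_find (df, bb, insn, dst_reg);

     if (use_insn != NULL_RTX)
       ... the only use of the def in INSN is within USE_INSN ...
*/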
3265*c87b03e5Sespie
3266*c87b03e5Sespie /* Functions for debugging/dumping dataflow information. */
3267*c87b03e5Sespie
3268*c87b03e5Sespie
3269*c87b03e5Sespie /* Dump a def-use or use-def chain for REF to FILE. */
3270*c87b03e5Sespie static void
3271*c87b03e5Sespie df_chain_dump (link, file)
3272*c87b03e5Sespie struct df_link *link;
3273*c87b03e5Sespie FILE *file;
3274*c87b03e5Sespie {
3275*c87b03e5Sespie fprintf (file, "{ ");
3276*c87b03e5Sespie for (; link; link = link->next)
3277*c87b03e5Sespie {
3278*c87b03e5Sespie fprintf (file, "%c%d ",
3279*c87b03e5Sespie DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3280*c87b03e5Sespie DF_REF_ID (link->ref));
3281*c87b03e5Sespie }
3282*c87b03e5Sespie fprintf (file, "}");
3283*c87b03e5Sespie }
3284*c87b03e5Sespie
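/* As df_chain_dump but also print the register number of each ref.  */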
3285*c87b03e5Sespie static void
3286*c87b03e5Sespie df_chain_dump_regno (link, file)
3287*c87b03e5Sespie struct df_link *link;
3288*c87b03e5Sespie FILE *file;
3289*c87b03e5Sespie {
3290*c87b03e5Sespie fprintf (file, "{ ");
3291*c87b03e5Sespie for (; link; link = link->next)
3292*c87b03e5Sespie {
3293*c87b03e5Sespie fprintf (file, "%c%d(%d) ",
3294*c87b03e5Sespie DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3295*c87b03e5Sespie DF_REF_ID (link->ref),
3296*c87b03e5Sespie DF_REF_REGNO (link->ref));
3297*c87b03e5Sespie }
3298*c87b03e5Sespie fprintf (file, "}");
3299*c87b03e5Sespie }
3300*c87b03e5Sespie
3301*c87b03e5Sespie /* Dump dataflow info. */
3302*c87b03e5Sespie void
3303*c87b03e5Sespie df_dump (df, flags, file)
3304*c87b03e5Sespie struct df *df;
3305*c87b03e5Sespie int flags;
3306*c87b03e5Sespie FILE *file;
3307*c87b03e5Sespie {
3308*c87b03e5Sespie unsigned int j;
3309*c87b03e5Sespie basic_block bb;
3310*c87b03e5Sespie
3311*c87b03e5Sespie if (! df || ! file)
3312*c87b03e5Sespie return;
3313*c87b03e5Sespie
3314*c87b03e5Sespie fprintf (file, "\nDataflow summary:\n");
3315*c87b03e5Sespie fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
3316*c87b03e5Sespie df->n_regs, df->n_defs, df->n_uses, df->n_bbs);
3317*c87b03e5Sespie
3318*c87b03e5Sespie if (flags & DF_RD)
3319*c87b03e5Sespie {
3320*c87b03e5Sespie basic_block bb;
3321*c87b03e5Sespie
3322*c87b03e5Sespie fprintf (file, "Reaching defs:\n");
3323*c87b03e5Sespie FOR_EACH_BB (bb)
3324*c87b03e5Sespie {
3325*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
3326*c87b03e5Sespie
3327*c87b03e5Sespie if (! bb_info->rd_in)
3328*c87b03e5Sespie continue;
3329*c87b03e5Sespie
3330*c87b03e5Sespie fprintf (file, "bb %d in \t", bb->index);
3331*c87b03e5Sespie dump_bitmap (file, bb_info->rd_in);
3332*c87b03e5Sespie fprintf (file, "bb %d gen \t", bb->index);
3333*c87b03e5Sespie dump_bitmap (file, bb_info->rd_gen);
3334*c87b03e5Sespie fprintf (file, "bb %d kill\t", bb->index);
3335*c87b03e5Sespie dump_bitmap (file, bb_info->rd_kill);
3336*c87b03e5Sespie fprintf (file, "bb %d out \t", bb->index);
3337*c87b03e5Sespie dump_bitmap (file, bb_info->rd_out);
3338*c87b03e5Sespie }
3339*c87b03e5Sespie }
3340*c87b03e5Sespie
3341*c87b03e5Sespie if (flags & DF_UD_CHAIN)
3342*c87b03e5Sespie {
3343*c87b03e5Sespie fprintf (file, "Use-def chains:\n");
3344*c87b03e5Sespie for (j = 0; j < df->n_defs; j++)
3345*c87b03e5Sespie {
3346*c87b03e5Sespie if (df->defs[j])
3347*c87b03e5Sespie {
3348*c87b03e5Sespie fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3349*c87b03e5Sespie j, DF_REF_BBNO (df->defs[j]),
3350*c87b03e5Sespie DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
3351*c87b03e5Sespie DF_REF_INSN_UID (df->defs[j]),
3352*c87b03e5Sespie DF_REF_REGNO (df->defs[j]));
3353*c87b03e5Sespie if (df->defs[j]->flags & DF_REF_READ_WRITE)
3354*c87b03e5Sespie fprintf (file, "read/write ");
3355*c87b03e5Sespie df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
3356*c87b03e5Sespie fprintf (file, "\n");
3357*c87b03e5Sespie }
3358*c87b03e5Sespie }
3359*c87b03e5Sespie }
3360*c87b03e5Sespie
3361*c87b03e5Sespie if (flags & DF_RU)
3362*c87b03e5Sespie {
3363*c87b03e5Sespie fprintf (file, "Reaching uses:\n");
3364*c87b03e5Sespie FOR_EACH_BB (bb)
3365*c87b03e5Sespie {
3366*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
3367*c87b03e5Sespie
3368*c87b03e5Sespie if (! bb_info->ru_in)
3369*c87b03e5Sespie continue;
3370*c87b03e5Sespie
3371*c87b03e5Sespie fprintf (file, "bb %d in \t", bb->index);
3372*c87b03e5Sespie dump_bitmap (file, bb_info->ru_in);
3373*c87b03e5Sespie fprintf (file, "bb %d gen \t", bb->index);
3374*c87b03e5Sespie dump_bitmap (file, bb_info->ru_gen);
3375*c87b03e5Sespie fprintf (file, "bb %d kill\t", bb->index);
3376*c87b03e5Sespie dump_bitmap (file, bb_info->ru_kill);
3377*c87b03e5Sespie fprintf (file, "bb %d out \t", bb->index);
3378*c87b03e5Sespie dump_bitmap (file, bb_info->ru_out);
3379*c87b03e5Sespie }
3380*c87b03e5Sespie }
3381*c87b03e5Sespie
3382*c87b03e5Sespie if (flags & DF_DU_CHAIN)
3383*c87b03e5Sespie {
3384*c87b03e5Sespie fprintf (file, "Def-use chains:\n");
3385*c87b03e5Sespie for (j = 0; j < df->n_uses; j++)
3386*c87b03e5Sespie {
3387*c87b03e5Sespie if (df->uses[j])
3388*c87b03e5Sespie {
3389*c87b03e5Sespie fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3390*c87b03e5Sespie j, DF_REF_BBNO (df->uses[j]),
3391*c87b03e5Sespie DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
3392*c87b03e5Sespie DF_REF_INSN_UID (df->uses[j]),
3393*c87b03e5Sespie DF_REF_REGNO (df->uses[j]));
3394*c87b03e5Sespie if (df->uses[j]->flags & DF_REF_READ_WRITE)
3395*c87b03e5Sespie fprintf (file, "read/write ");
3396*c87b03e5Sespie df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
3397*c87b03e5Sespie fprintf (file, "\n");
3398*c87b03e5Sespie }
3399*c87b03e5Sespie }
3400*c87b03e5Sespie }
3401*c87b03e5Sespie
3402*c87b03e5Sespie if (flags & DF_LR)
3403*c87b03e5Sespie {
3404*c87b03e5Sespie fprintf (file, "Live regs:\n");
3405*c87b03e5Sespie FOR_EACH_BB (bb)
3406*c87b03e5Sespie {
3407*c87b03e5Sespie struct bb_info *bb_info = DF_BB_INFO (df, bb);
3408*c87b03e5Sespie
3409*c87b03e5Sespie if (! bb_info->lr_in)
3410*c87b03e5Sespie continue;
3411*c87b03e5Sespie
3412*c87b03e5Sespie fprintf (file, "bb %d in \t", bb->index);
3413*c87b03e5Sespie dump_bitmap (file, bb_info->lr_in);
3414*c87b03e5Sespie fprintf (file, "bb %d use \t", bb->index);
3415*c87b03e5Sespie dump_bitmap (file, bb_info->lr_use);
3416*c87b03e5Sespie fprintf (file, "bb %d def \t", bb->index);
3417*c87b03e5Sespie dump_bitmap (file, bb_info->lr_def);
3418*c87b03e5Sespie fprintf (file, "bb %d out \t", bb->index);
3419*c87b03e5Sespie dump_bitmap (file, bb_info->lr_out);
3420*c87b03e5Sespie }
3421*c87b03e5Sespie }
3422*c87b03e5Sespie
3423*c87b03e5Sespie if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
3424*c87b03e5Sespie {
3425*c87b03e5Sespie struct reg_info *reg_info = df->regs;
3426*c87b03e5Sespie
3427*c87b03e5Sespie fprintf (file, "Register info:\n");
3428*c87b03e5Sespie for (j = 0; j < df->n_regs; j++)
3429*c87b03e5Sespie {
3430*c87b03e5Sespie if (((flags & DF_REG_INFO)
3431*c87b03e5Sespie && (reg_info[j].n_uses || reg_info[j].n_defs))
3432*c87b03e5Sespie || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
3433*c87b03e5Sespie || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
3434*c87b03e5Sespie {
3435*c87b03e5Sespie fprintf (file, "reg %d", j);
3436*c87b03e5Sespie if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
3437*c87b03e5Sespie {
3438*c87b03e5Sespie basic_block bb = df_regno_bb (df, j);
3439*c87b03e5Sespie
3440*c87b03e5Sespie if (bb)
3441*c87b03e5Sespie fprintf (file, " bb %d", bb->index);
3442*c87b03e5Sespie else
3443*c87b03e5Sespie fprintf (file, " bb ?");
3444*c87b03e5Sespie }
3445*c87b03e5Sespie if (flags & DF_REG_INFO)
3446*c87b03e5Sespie {
3447*c87b03e5Sespie fprintf (file, " life %d", reg_info[j].lifetime);
3448*c87b03e5Sespie }
3449*c87b03e5Sespie
3450*c87b03e5Sespie if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
3451*c87b03e5Sespie {
3452*c87b03e5Sespie fprintf (file, " defs ");
3453*c87b03e5Sespie if (flags & DF_REG_INFO)
3454*c87b03e5Sespie fprintf (file, "%d ", reg_info[j].n_defs);
3455*c87b03e5Sespie if (flags & DF_RD_CHAIN)
3456*c87b03e5Sespie df_chain_dump (reg_info[j].defs, file);
3457*c87b03e5Sespie }
3458*c87b03e5Sespie
3459*c87b03e5Sespie if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
3460*c87b03e5Sespie {
3461*c87b03e5Sespie fprintf (file, " uses ");
3462*c87b03e5Sespie if (flags & DF_REG_INFO)
3463*c87b03e5Sespie fprintf (file, "%d ", reg_info[j].n_uses);
3464*c87b03e5Sespie if (flags & DF_RU_CHAIN)
3465*c87b03e5Sespie df_chain_dump (reg_info[j].uses, file);
3466*c87b03e5Sespie }
3467*c87b03e5Sespie
3468*c87b03e5Sespie fprintf (file, "\n");
3469*c87b03e5Sespie }
3470*c87b03e5Sespie }
3471*c87b03e5Sespie }
3472*c87b03e5Sespie fprintf (file, "\n");
3473*c87b03e5Sespie }
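/* For illustration, a chain entry printed by df_dump with DF_UD_CHAIN
   enabled looks like (numbers invented)

     d3 bb 2 luid 5 insn 17 reg 58 { u7 u9 }

   i.e. def number 3 in basic block 2, at the insn with luid 5 and uid 17,
   for register 58, with the refs u7 and u9 on its chain.  */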
3474*c87b03e5Sespie
3475*c87b03e5Sespie
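/* Dump the def and use chains of INSN to FILE.  */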
3476*c87b03e5Sespie void
3477*c87b03e5Sespie df_insn_debug (df, insn, file)
3478*c87b03e5Sespie struct df *df;
3479*c87b03e5Sespie rtx insn;
3480*c87b03e5Sespie FILE *file;
3481*c87b03e5Sespie {
3482*c87b03e5Sespie unsigned int uid;
3483*c87b03e5Sespie int bbi;
3484*c87b03e5Sespie
3485*c87b03e5Sespie uid = INSN_UID (insn);
3486*c87b03e5Sespie if (uid >= df->insn_size)
3487*c87b03e5Sespie return;
3488*c87b03e5Sespie
3489*c87b03e5Sespie if (df->insns[uid].defs)
3490*c87b03e5Sespie bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3491*c87b03e5Sespie else if (df->insns[uid].uses)
3492*c87b03e5Sespie bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3493*c87b03e5Sespie else
3494*c87b03e5Sespie bbi = -1;
3495*c87b03e5Sespie
3496*c87b03e5Sespie fprintf (file, "insn %d bb %d luid %d defs ",
3497*c87b03e5Sespie uid, bbi, DF_INSN_LUID (df, insn));
3498*c87b03e5Sespie df_chain_dump (df->insns[uid].defs, file);
3499*c87b03e5Sespie fprintf (file, " uses ");
3500*c87b03e5Sespie df_chain_dump (df->insns[uid].uses, file);
3501*c87b03e5Sespie fprintf (file, "\n");
3502*c87b03e5Sespie }
3503*c87b03e5Sespie
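/* As df_insn_debug but print the register number of each ref.  */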
3504*c87b03e5Sespie void
3505*c87b03e5Sespie df_insn_debug_regno (df, insn, file)
3506*c87b03e5Sespie struct df *df;
3507*c87b03e5Sespie rtx insn;
3508*c87b03e5Sespie FILE *file;
3509*c87b03e5Sespie {
3510*c87b03e5Sespie unsigned int uid;
3511*c87b03e5Sespie int bbi;
3512*c87b03e5Sespie
3513*c87b03e5Sespie uid = INSN_UID (insn);
3514*c87b03e5Sespie if (uid >= df->insn_size)
3515*c87b03e5Sespie return;
3516*c87b03e5Sespie
3517*c87b03e5Sespie if (df->insns[uid].defs)
3518*c87b03e5Sespie bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3519*c87b03e5Sespie else if (df->insns[uid].uses)
3520*c87b03e5Sespie bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3521*c87b03e5Sespie else
3522*c87b03e5Sespie bbi = -1;
3523*c87b03e5Sespie
3524*c87b03e5Sespie fprintf (file, "insn %d bb %d luid %d defs ",
3525*c87b03e5Sespie uid, bbi, DF_INSN_LUID (df, insn));
3526*c87b03e5Sespie df_chain_dump_regno (df->insns[uid].defs, file);
3527*c87b03e5Sespie fprintf (file, " uses ");
3528*c87b03e5Sespie df_chain_dump_regno (df->insns[uid].uses, file);
3529*c87b03e5Sespie fprintf (file, "\n");
3530*c87b03e5Sespie }
3531*c87b03e5Sespie
3532*c87b03e5Sespie static void
3533*c87b03e5Sespie df_regno_debug (df, regno, file)
3534*c87b03e5Sespie struct df *df;
3535*c87b03e5Sespie unsigned int regno;
3536*c87b03e5Sespie FILE *file;
3537*c87b03e5Sespie {
3538*c87b03e5Sespie if (regno >= df->reg_size)
3539*c87b03e5Sespie return;
3540*c87b03e5Sespie
3541*c87b03e5Sespie fprintf (file, "reg %d life %d defs ",
3542*c87b03e5Sespie regno, df->regs[regno].lifetime);
3543*c87b03e5Sespie df_chain_dump (df->regs[regno].defs, file);
3544*c87b03e5Sespie fprintf (file, " uses ");
3545*c87b03e5Sespie df_chain_dump (df->regs[regno].uses, file);
3546*c87b03e5Sespie fprintf (file, "\n");
3547*c87b03e5Sespie }
3548*c87b03e5Sespie
3549*c87b03e5Sespie
3550*c87b03e5Sespie static void
3551*c87b03e5Sespie df_ref_debug (df, ref, file)
3552*c87b03e5Sespie struct df *df;
3553*c87b03e5Sespie struct ref *ref;
3554*c87b03e5Sespie FILE *file;
3555*c87b03e5Sespie {
3556*c87b03e5Sespie fprintf (file, "%c%d ",
3557*c87b03e5Sespie DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
3558*c87b03e5Sespie DF_REF_ID (ref));
3559*c87b03e5Sespie fprintf (file, "reg %d bb %d luid %d insn %d chain ",
3560*c87b03e5Sespie DF_REF_REGNO (ref),
3561*c87b03e5Sespie DF_REF_BBNO (ref),
3562*c87b03e5Sespie DF_INSN_LUID (df, DF_REF_INSN (ref)),
3563*c87b03e5Sespie INSN_UID (DF_REF_INSN (ref)));
3564*c87b03e5Sespie df_chain_dump (DF_REF_CHAIN (ref), file);
3565*c87b03e5Sespie fprintf (file, "\n");
3566*c87b03e5Sespie }
3567*c87b03e5Sespie
3568*c87b03e5Sespie
3569*c87b03e5Sespie void
3570*c87b03e5Sespie debug_df_insn (insn)
3571*c87b03e5Sespie rtx insn;
3572*c87b03e5Sespie {
3573*c87b03e5Sespie df_insn_debug (ddf, insn, stderr);
3574*c87b03e5Sespie debug_rtx (insn);
3575*c87b03e5Sespie }
3576*c87b03e5Sespie
3577*c87b03e5Sespie
3578*c87b03e5Sespie void
3579*c87b03e5Sespie debug_df_reg (reg)
3580*c87b03e5Sespie rtx reg;
3581*c87b03e5Sespie {
3582*c87b03e5Sespie df_regno_debug (ddf, REGNO (reg), stderr);
3583*c87b03e5Sespie }
3584*c87b03e5Sespie
3585*c87b03e5Sespie
3586*c87b03e5Sespie void
3587*c87b03e5Sespie debug_df_regno (regno)
3588*c87b03e5Sespie unsigned int regno;
3589*c87b03e5Sespie {
3590*c87b03e5Sespie df_regno_debug (ddf, regno, stderr);
3591*c87b03e5Sespie }
3592*c87b03e5Sespie
3593*c87b03e5Sespie
3594*c87b03e5Sespie void
3595*c87b03e5Sespie debug_df_ref (ref)
3596*c87b03e5Sespie struct ref *ref;
3597*c87b03e5Sespie {
3598*c87b03e5Sespie df_ref_debug (ddf, ref, stderr);
3599*c87b03e5Sespie }
3600*c87b03e5Sespie
3601*c87b03e5Sespie
3602*c87b03e5Sespie void
3603*c87b03e5Sespie debug_df_defno (defno)
3604*c87b03e5Sespie unsigned int defno;
3605*c87b03e5Sespie {
3606*c87b03e5Sespie df_ref_debug (ddf, ddf->defs[defno], stderr);
3607*c87b03e5Sespie }
3608*c87b03e5Sespie
3609*c87b03e5Sespie
3610*c87b03e5Sespie void
3611*c87b03e5Sespie debug_df_useno (defno)
3612*c87b03e5Sespie unsigned int defno;
3613*c87b03e5Sespie {
3614*c87b03e5Sespie df_ref_debug (ddf, ddf->uses[defno], stderr);
3615*c87b03e5Sespie }
3616*c87b03e5Sespie
3617*c87b03e5Sespie
3618*c87b03e5Sespie void
3619*c87b03e5Sespie debug_df_chain (link)
3620*c87b03e5Sespie struct df_link *link;
3621*c87b03e5Sespie {
3622*c87b03e5Sespie df_chain_dump (link, stderr);
3623*c87b03e5Sespie fputc ('\n', stderr);
3624*c87b03e5Sespie }
3625*c87b03e5Sespie
3626*c87b03e5Sespie /* Hybrid search algorithm from "Implementation Techniques for
3627*c87b03e5Sespie Efficient Data-Flow Analysis of Large Programs". */
3628*c87b03e5Sespie static void
3629*c87b03e5Sespie hybrid_search_bitmap (block, in, out, gen, kill, dir,
3630*c87b03e5Sespie conf_op, transfun, visited, pending,
3631*c87b03e5Sespie data)
3632*c87b03e5Sespie basic_block block;
3633*c87b03e5Sespie bitmap *in, *out, *gen, *kill;
3634*c87b03e5Sespie enum df_flow_dir dir;
3635*c87b03e5Sespie enum df_confluence_op conf_op;
3636*c87b03e5Sespie transfer_function_bitmap transfun;
3637*c87b03e5Sespie sbitmap visited;
3638*c87b03e5Sespie sbitmap pending;
3639*c87b03e5Sespie void *data;
3640*c87b03e5Sespie {
3641*c87b03e5Sespie int changed;
3642*c87b03e5Sespie int i = block->index;
3643*c87b03e5Sespie edge e;
3644*c87b03e5Sespie basic_block bb = block;
3645*c87b03e5Sespie SET_BIT (visited, block->index);
3646*c87b03e5Sespie if (TEST_BIT (pending, block->index))
3647*c87b03e5Sespie {
3648*c87b03e5Sespie if (dir == FORWARD)
3649*c87b03e5Sespie {
3650*c87b03e5Sespie /* Calculate <conf_op> of predecessor_outs */
3651*c87b03e5Sespie bitmap_zero (in[i]);
3652*c87b03e5Sespie for (e = bb->pred; e != 0; e = e->pred_next)
3653*c87b03e5Sespie {
3654*c87b03e5Sespie if (e->src == ENTRY_BLOCK_PTR)
3655*c87b03e5Sespie continue;
3656*c87b03e5Sespie switch (conf_op)
3657*c87b03e5Sespie {
3658*c87b03e5Sespie case UNION:
3659*c87b03e5Sespie bitmap_a_or_b (in[i], in[i], out[e->src->index]);
3660*c87b03e5Sespie break;
3661*c87b03e5Sespie case INTERSECTION:
3662*c87b03e5Sespie bitmap_a_and_b (in[i], in[i], out[e->src->index]);
3663*c87b03e5Sespie break;
3664*c87b03e5Sespie }
3665*c87b03e5Sespie }
3666*c87b03e5Sespie }
3667*c87b03e5Sespie else
3668*c87b03e5Sespie {
3669*c87b03e5Sespie /* Calculate <conf_op> of successor ins */
3670*c87b03e5Sespie bitmap_zero (out[i]);
3671*c87b03e5Sespie for (e = bb->succ; e != 0; e = e->succ_next)
3672*c87b03e5Sespie {
3673*c87b03e5Sespie if (e->dest == EXIT_BLOCK_PTR)
3674*c87b03e5Sespie continue;
3675*c87b03e5Sespie switch (conf_op)
3676*c87b03e5Sespie {
3677*c87b03e5Sespie case UNION:
3678*c87b03e5Sespie bitmap_a_or_b (out[i], out[i], in[e->dest->index]);
3679*c87b03e5Sespie break;
3680*c87b03e5Sespie case INTERSECTION:
3681*c87b03e5Sespie bitmap_a_and_b (out[i], out[i], in[e->dest->index]);
3682*c87b03e5Sespie break;
3683*c87b03e5Sespie }
3684*c87b03e5Sespie }
3685*c87b03e5Sespie }
3686*c87b03e5Sespie /* Common part */
3687*c87b03e5Sespie (*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
3688*c87b03e5Sespie RESET_BIT (pending, i);
3689*c87b03e5Sespie if (changed)
3690*c87b03e5Sespie {
3691*c87b03e5Sespie if (dir == FORWARD)
3692*c87b03e5Sespie {
3693*c87b03e5Sespie for (e = bb->succ; e != 0; e = e->succ_next)
3694*c87b03e5Sespie {
3695*c87b03e5Sespie if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3696*c87b03e5Sespie continue;
3697*c87b03e5Sespie SET_BIT (pending, e->dest->index);
3698*c87b03e5Sespie }
3699*c87b03e5Sespie }
3700*c87b03e5Sespie else
3701*c87b03e5Sespie {
3702*c87b03e5Sespie for (e = bb->pred; e != 0; e = e->pred_next)
3703*c87b03e5Sespie {
3704*c87b03e5Sespie if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3705*c87b03e5Sespie continue;
3706*c87b03e5Sespie SET_BIT (pending, e->src->index);
3707*c87b03e5Sespie }
3708*c87b03e5Sespie }
3709*c87b03e5Sespie }
3710*c87b03e5Sespie }
3711*c87b03e5Sespie if (dir == FORWARD)
3712*c87b03e5Sespie {
3713*c87b03e5Sespie for (e = bb->succ; e != 0; e = e->succ_next)
3714*c87b03e5Sespie {
3715*c87b03e5Sespie if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3716*c87b03e5Sespie continue;
3717*c87b03e5Sespie if (!TEST_BIT (visited, e->dest->index))
3718*c87b03e5Sespie hybrid_search_bitmap (e->dest, in, out, gen, kill, dir,
3719*c87b03e5Sespie conf_op, transfun, visited, pending,
3720*c87b03e5Sespie data);
3721*c87b03e5Sespie }
3722*c87b03e5Sespie }
3723*c87b03e5Sespie else
3724*c87b03e5Sespie {
3725*c87b03e5Sespie for (e = bb->pred; e != 0; e = e->pred_next)
3726*c87b03e5Sespie {
3727*c87b03e5Sespie if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3728*c87b03e5Sespie continue;
3729*c87b03e5Sespie if (!TEST_BIT (visited, e->src->index))
3730*c87b03e5Sespie hybrid_search_bitmap (e->src, in, out, gen, kill, dir,
3731*c87b03e5Sespie conf_op, transfun, visited, pending,
3732*c87b03e5Sespie data);
3733*c87b03e5Sespie }
3734*c87b03e5Sespie }
3735*c87b03e5Sespie }
3736*c87b03e5Sespie
3737*c87b03e5Sespie
3738*c87b03e5Sespie /* Hybrid search for sbitmaps, rather than bitmaps. */
3739*c87b03e5Sespie static void
3740*c87b03e5Sespie hybrid_search_sbitmap (block, in, out, gen, kill, dir,
3741*c87b03e5Sespie conf_op, transfun, visited, pending,
3742*c87b03e5Sespie data)
3743*c87b03e5Sespie basic_block block;
3744*c87b03e5Sespie sbitmap *in, *out, *gen, *kill;
3745*c87b03e5Sespie enum df_flow_dir dir;
3746*c87b03e5Sespie enum df_confluence_op conf_op;
3747*c87b03e5Sespie transfer_function_sbitmap transfun;
3748*c87b03e5Sespie sbitmap visited;
3749*c87b03e5Sespie sbitmap pending;
3750*c87b03e5Sespie void *data;
3751*c87b03e5Sespie {
3752*c87b03e5Sespie int changed;
3753*c87b03e5Sespie int i = block->index;
3754*c87b03e5Sespie edge e;
3755*c87b03e5Sespie basic_block bb = block;
3756*c87b03e5Sespie SET_BIT (visited, block->index);
3757*c87b03e5Sespie if (TEST_BIT (pending, block->index))
3758*c87b03e5Sespie {
3759*c87b03e5Sespie if (dir == FORWARD)
3760*c87b03e5Sespie {
3761*c87b03e5Sespie /* Calculate <conf_op> of predecessor_outs */
3762*c87b03e5Sespie sbitmap_zero (in[i]);
3763*c87b03e5Sespie for (e = bb->pred; e != 0; e = e->pred_next)
3764*c87b03e5Sespie {
3765*c87b03e5Sespie if (e->src == ENTRY_BLOCK_PTR)
3766*c87b03e5Sespie continue;
3767*c87b03e5Sespie switch (conf_op)
3768*c87b03e5Sespie {
3769*c87b03e5Sespie case UNION:
3770*c87b03e5Sespie sbitmap_a_or_b (in[i], in[i], out[e->src->index]);
3771*c87b03e5Sespie break;
3772*c87b03e5Sespie case INTERSECTION:
3773*c87b03e5Sespie sbitmap_a_and_b (in[i], in[i], out[e->src->index]);
3774*c87b03e5Sespie break;
3775*c87b03e5Sespie }
3776*c87b03e5Sespie }
3777*c87b03e5Sespie }
3778*c87b03e5Sespie else
3779*c87b03e5Sespie {
3780*c87b03e5Sespie /* Calculate <conf_op> of successor ins */
3781*c87b03e5Sespie sbitmap_zero (out[i]);
3782*c87b03e5Sespie for (e = bb->succ; e != 0; e = e->succ_next)
3783*c87b03e5Sespie {
3784*c87b03e5Sespie if (e->dest == EXIT_BLOCK_PTR)
3785*c87b03e5Sespie continue;
3786*c87b03e5Sespie switch (conf_op)
3787*c87b03e5Sespie {
3788*c87b03e5Sespie case UNION:
3789*c87b03e5Sespie sbitmap_a_or_b (out[i], out[i], in[e->dest->index]);
3790*c87b03e5Sespie break;
3791*c87b03e5Sespie case INTERSECTION:
3792*c87b03e5Sespie sbitmap_a_and_b (out[i], out[i], in[e->dest->index]);
3793*c87b03e5Sespie break;
3794*c87b03e5Sespie }
3795*c87b03e5Sespie }
3796*c87b03e5Sespie }
3797*c87b03e5Sespie /* Common part */
3798*c87b03e5Sespie (*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
3799*c87b03e5Sespie RESET_BIT (pending, i);
3800*c87b03e5Sespie if (changed)
3801*c87b03e5Sespie {
3802*c87b03e5Sespie if (dir == FORWARD)
3803*c87b03e5Sespie {
3804*c87b03e5Sespie for (e = bb->succ; e != 0; e = e->succ_next)
3805*c87b03e5Sespie {
3806*c87b03e5Sespie if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3807*c87b03e5Sespie continue;
3808*c87b03e5Sespie SET_BIT (pending, e->dest->index);
3809*c87b03e5Sespie }
3810*c87b03e5Sespie }
3811*c87b03e5Sespie else
3812*c87b03e5Sespie {
3813*c87b03e5Sespie for (e = bb->pred; e != 0; e = e->pred_next)
3814*c87b03e5Sespie {
3815*c87b03e5Sespie if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3816*c87b03e5Sespie continue;
3817*c87b03e5Sespie SET_BIT (pending, e->src->index);
3818*c87b03e5Sespie }
3819*c87b03e5Sespie }
3820*c87b03e5Sespie }
3821*c87b03e5Sespie }
3822*c87b03e5Sespie if (dir == FORWARD)
3823*c87b03e5Sespie {
3824*c87b03e5Sespie for (e = bb->succ; e != 0; e = e->succ_next)
3825*c87b03e5Sespie {
3826*c87b03e5Sespie if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3827*c87b03e5Sespie continue;
3828*c87b03e5Sespie if (!TEST_BIT (visited, e->dest->index))
3829*c87b03e5Sespie hybrid_search_sbitmap (e->dest, in, out, gen, kill, dir,
3830*c87b03e5Sespie conf_op, transfun, visited, pending,
3831*c87b03e5Sespie data);
3832*c87b03e5Sespie }
3833*c87b03e5Sespie }
3834*c87b03e5Sespie else
3835*c87b03e5Sespie {
3836*c87b03e5Sespie for (e = bb->pred; e != 0; e = e->pred_next)
3837*c87b03e5Sespie {
3838*c87b03e5Sespie if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3839*c87b03e5Sespie continue;
3840*c87b03e5Sespie if (!TEST_BIT (visited, e->src->index))
3841*c87b03e5Sespie hybrid_search_sbitmap (e->src, in, out, gen, kill, dir,
3842*c87b03e5Sespie conf_op, transfun, visited, pending,
3843*c87b03e5Sespie data);
3844*c87b03e5Sespie }
3845*c87b03e5Sespie }
3846*c87b03e5Sespie }
3847*c87b03e5Sespie
3848*c87b03e5Sespie
3849*c87b03e5Sespie
3850*c87b03e5Sespie
3851*c87b03e5Sespie /* gen = GEN set.
3852*c87b03e5Sespie kill = KILL set.
3853*c87b03e5Sespie in, out = Filled in by function.
3854*c87b03e5Sespie blocks = Blocks to analyze.
3855*c87b03e5Sespie dir = Dataflow direction.
3856*c87b03e5Sespie conf_op = Confluence operation.
3857*c87b03e5Sespie transfun = Transfer function.
3858*c87b03e5Sespie order = Order to iterate in. (Should map block numbers -> order)
3859*c87b03e5Sespie data = Whatever you want. It's passed to the transfer function.
3860*c87b03e5Sespie
3861*c87b03e5Sespie This function will perform iterative bitvector dataflow, producing
3862*c87b03e5Sespie the in and out sets. Even if you only want to perform it for a
3863*c87b03e5Sespie small number of blocks, the vectors for in and out must be large
3864*c87b03e5Sespie enough for *all* blocks, because changing one block might affect
3865*c87b03e5Sespie others. However, it'll only put what you say to analyze on the
3866*c87b03e5Sespie initial worklist.
3867*c87b03e5Sespie
3868*c87b03e5Sespie For forward problems, you probably want to pass in a mapping of
3869*c87b03e5Sespie block number to rc_order (like df->inverse_rc_map).
3870*c87b03e5Sespie */
3871*c87b03e5Sespie void
3872*c87b03e5Sespie iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
3873*c87b03e5Sespie dir, conf_op, transfun, order, data)
3874*c87b03e5Sespie sbitmap *in, *out, *gen, *kill;
3875*c87b03e5Sespie bitmap blocks;
3876*c87b03e5Sespie enum df_flow_dir dir;
3877*c87b03e5Sespie enum df_confluence_op conf_op;
3878*c87b03e5Sespie transfer_function_sbitmap transfun;
3879*c87b03e5Sespie int *order;
3880*c87b03e5Sespie void *data;
3881*c87b03e5Sespie {
3882*c87b03e5Sespie int i;
3883*c87b03e5Sespie fibheap_t worklist;
3884*c87b03e5Sespie basic_block bb;
3885*c87b03e5Sespie sbitmap visited, pending;
3886*c87b03e5Sespie pending = sbitmap_alloc (last_basic_block);
3887*c87b03e5Sespie visited = sbitmap_alloc (last_basic_block);
3888*c87b03e5Sespie sbitmap_zero (pending);
3889*c87b03e5Sespie sbitmap_zero (visited);
3890*c87b03e5Sespie worklist = fibheap_new ();
3891*c87b03e5Sespie EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3892*c87b03e5Sespie {
3893*c87b03e5Sespie fibheap_insert (worklist, order[i], (void *) (size_t) i);
3894*c87b03e5Sespie SET_BIT (pending, i);
3895*c87b03e5Sespie if (dir == FORWARD)
3896*c87b03e5Sespie sbitmap_copy (out[i], gen[i]);
3897*c87b03e5Sespie else
3898*c87b03e5Sespie sbitmap_copy (in[i], gen[i]);
3899*c87b03e5Sespie });
3900*c87b03e5Sespie while (sbitmap_first_set_bit (pending) != -1)
3901*c87b03e5Sespie {
3902*c87b03e5Sespie while (!fibheap_empty (worklist))
3903*c87b03e5Sespie {
3904*c87b03e5Sespie i = (size_t) fibheap_extract_min (worklist);
3905*c87b03e5Sespie bb = BASIC_BLOCK (i);
3906*c87b03e5Sespie if (!TEST_BIT (visited, bb->index))
3907*c87b03e5Sespie hybrid_search_sbitmap (bb, in, out, gen, kill, dir,
3908*c87b03e5Sespie conf_op, transfun, visited, pending, data);
3909*c87b03e5Sespie }
3910*c87b03e5Sespie if (sbitmap_first_set_bit (pending) != -1)
3911*c87b03e5Sespie {
3912*c87b03e5Sespie EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3913*c87b03e5Sespie {
3914*c87b03e5Sespie fibheap_insert (worklist, order[i], (void *) (size_t) i);
3915*c87b03e5Sespie });
3916*c87b03e5Sespie sbitmap_zero (visited);
3917*c87b03e5Sespie }
3918*c87b03e5Sespie else
3919*c87b03e5Sespie {
3920*c87b03e5Sespie break;
3921*c87b03e5Sespie }
3922*c87b03e5Sespie }
3923*c87b03e5Sespie sbitmap_free (pending);
3924*c87b03e5Sespie sbitmap_free (visited);
3925*c87b03e5Sespie fibheap_delete (worklist);
3926*c87b03e5Sespie }
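/* Usage sketch (illustrative, not from the original sources): solving a
   forward, union-confluence problem over every basic block.  RD_TRANSFER
   is a hypothetical transfer_function_sbitmap that computes
   out = gen | (in & ~kill) and sets *changed when OUT changes; IN, OUT,
   GEN and KILL are sbitmap vectors indexed by block number, and DF is an
   initialised struct df * so that df->inverse_rc_map is available.

     bitmap blocks = BITMAP_XMALLOC ();
     basic_block bb;

     FOR_EACH_BB (bb)
       bitmap_set_bit (blocks, bb->index);

     iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
                                 FORWARD, UNION, rd_transfer,
                                 df->inverse_rc_map, NULL);
     BITMAP_XFREE (blocks);
*/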
3927*c87b03e5Sespie
3928*c87b03e5Sespie /* Exactly the same as iterative_dataflow_sbitmap, except it works on
3929*c87b03e5Sespie bitmaps instead */
3930*c87b03e5Sespie void
3931*c87b03e5Sespie iterative_dataflow_bitmap (in, out, gen, kill, blocks,
3932*c87b03e5Sespie dir, conf_op, transfun, order, data)
3933*c87b03e5Sespie bitmap *in, *out, *gen, *kill;
3934*c87b03e5Sespie bitmap blocks;
3935*c87b03e5Sespie enum df_flow_dir dir;
3936*c87b03e5Sespie enum df_confluence_op conf_op;
3937*c87b03e5Sespie transfer_function_bitmap transfun;
3938*c87b03e5Sespie int *order;
3939*c87b03e5Sespie void *data;
3940*c87b03e5Sespie {
3941*c87b03e5Sespie int i;
3942*c87b03e5Sespie fibheap_t worklist;
3943*c87b03e5Sespie basic_block bb;
3944*c87b03e5Sespie sbitmap visited, pending;
3945*c87b03e5Sespie pending = sbitmap_alloc (last_basic_block);
3946*c87b03e5Sespie visited = sbitmap_alloc (last_basic_block);
3947*c87b03e5Sespie sbitmap_zero (pending);
3948*c87b03e5Sespie sbitmap_zero (visited);
3949*c87b03e5Sespie worklist = fibheap_new ();
3950*c87b03e5Sespie EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3951*c87b03e5Sespie {
3952*c87b03e5Sespie fibheap_insert (worklist, order[i], (void *) (size_t) i);
3953*c87b03e5Sespie SET_BIT (pending, i);
3954*c87b03e5Sespie if (dir == FORWARD)
3955*c87b03e5Sespie bitmap_copy (out[i], gen[i]);
3956*c87b03e5Sespie else
3957*c87b03e5Sespie bitmap_copy (in[i], gen[i]);
3958*c87b03e5Sespie });
3959*c87b03e5Sespie while (sbitmap_first_set_bit (pending) != -1)
3960*c87b03e5Sespie {
3961*c87b03e5Sespie while (!fibheap_empty (worklist))
3962*c87b03e5Sespie {
3963*c87b03e5Sespie i = (size_t) fibheap_extract_min (worklist);
3964*c87b03e5Sespie bb = BASIC_BLOCK (i);
3965*c87b03e5Sespie if (!TEST_BIT (visited, bb->index))
3966*c87b03e5Sespie hybrid_search_bitmap (bb, in, out, gen, kill, dir,
3967*c87b03e5Sespie conf_op, transfun, visited, pending, data);
3968*c87b03e5Sespie }
3969*c87b03e5Sespie if (sbitmap_first_set_bit (pending) != -1)
3970*c87b03e5Sespie {
3971*c87b03e5Sespie EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3972*c87b03e5Sespie {
3973*c87b03e5Sespie fibheap_insert (worklist, order[i], (void *) (size_t) i);
3974*c87b03e5Sespie });
3975*c87b03e5Sespie sbitmap_zero (visited);
3976*c87b03e5Sespie }
3977*c87b03e5Sespie else
3978*c87b03e5Sespie {
3979*c87b03e5Sespie break;
3980*c87b03e5Sespie }
3981*c87b03e5Sespie }
3982*c87b03e5Sespie sbitmap_free (pending);
3983*c87b03e5Sespie sbitmap_free (visited);
3984*c87b03e5Sespie fibheap_delete (worklist);
3985*c87b03e5Sespie }
3986