/* Perform branch target register load optimizations.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "insn-config.h"
#include "regs.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "expr.h"
#include "insn-attr.h"
#include "tree-pass.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "cfgloop.h"
#include "rtl-iter.h"
#include "fibonacci_heap.h"

struct btr_def;

/* Target register optimizations - these are performed after reload.  */

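/* A group of branch target register definitions whose defining insns
   all have the same constant source (e.g. the same label).  Defs in
   the same group are candidates for being combined when their webs
   are migrated.  */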
struct btr_def_group
{
  btr_def_group *next;
  rtx src;
  btr_def *members;
};

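/* A use of a branch target register: one instruction within a basic
   block that reads such a register.  */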
struct btr_user
{
  btr_user *next;
  basic_block bb;
  int luid;
  rtx_insn *insn;
  /* If INSN has a single use of a single branch register, then
     USE points to it within INSN.  If there is more than
     one branch register use, or the use is in some way ambiguous,
     then USE is NULL.  */
  rtx use;
  int n_reaching_defs;
  int first_reaching_def;
  char other_use_this_block;
};

/* btr_def structs appear on three lists:
     1. A list of all btr_def structures (head is
	ALL_BTR_DEFS, linked by the NEXT field).
     2. A list of branch reg definitions per basic block (head is
	BB_BTR_DEFS[i], linked by the NEXT_THIS_BB field).
     3. A list of all branch reg definitions belonging to the same
	group (head is in a BTR_DEF_GROUP struct, linked by
	NEXT_THIS_GROUP field).  */

struct btr_def
{
  btr_def *next_this_bb;
  btr_def *next_this_group;
  basic_block bb;
  int luid;
  rtx_insn *insn;
  int btr;
  int cost;
  /* For a branch register setting insn that has a constant
     source (i.e. a label), group links together all the
     insns with the same source.  For other branch register
     setting insns, group is NULL.  */
  btr_def_group *group;
  btr_user *uses;
  /* If this def has a reaching use which is not a simple use
     in a branch instruction, then has_ambiguous_use will be true,
     and we will not attempt to migrate this definition.  */
  char has_ambiguous_use;
  /* live_range is an approximation to the true live range for this
     def/use web, because it records the set of blocks that contain
     the live range.  There could be other live ranges for the same
     branch register in that set of blocks, either in the block
     containing the def (before the def), or in a block containing
     a use (after the use).  If there are such other live ranges, then
     other_btr_uses_before_def or other_btr_uses_after_use must be set true
     as appropriate.  */
  char other_btr_uses_before_def;
  char other_btr_uses_after_use;
  /* We set own_end when we have moved a definition into a dominator.
     Thus, when a later combination removes this definition again, we know
     to clear out btrs_live_at_end again.  */
  char own_end;
  bitmap live_range;
};

typedef fibonacci_heap <long, btr_def> btr_heap_t;
typedef fibonacci_node <long, btr_def> btr_heap_node_t;

static int issue_rate;

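/* Forward declarations of the local helper routines.  */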
static int basic_block_freq (const_basic_block);
static int insn_sets_btr_p (const rtx_insn *, int, int *);
static void find_btr_def_group (btr_def_group **, btr_def *);
static btr_def *add_btr_def (btr_heap_t *, basic_block, int, rtx_insn *,
			    unsigned int, int, btr_def_group **);
static btr_user *new_btr_user (basic_block, int, rtx_insn *);
static void dump_hard_reg_set (HARD_REG_SET);
static void dump_btrs_live (int);
static void note_other_use_this_block (unsigned int, btr_user *);
static void compute_defs_uses_and_gen (btr_heap_t *, btr_def **, btr_user **,
				       sbitmap *, sbitmap *, HARD_REG_SET *);
static void compute_kill (sbitmap *, sbitmap *, HARD_REG_SET *);
static void compute_out (sbitmap *bb_out, sbitmap *, sbitmap *, int);
static void link_btr_uses (btr_def **, btr_user **, sbitmap *, sbitmap *, int);
static void build_btr_def_use_webs (btr_heap_t *);
static int block_at_edge_of_live_range_p (int, btr_def *);
static void clear_btr_from_live_range (btr_def *def);
static void add_btr_to_live_range (btr_def *, int);
static void augment_live_range (bitmap, HARD_REG_SET *, basic_block,
				basic_block, int);
static int choose_btr (HARD_REG_SET);
static void combine_btr_defs (btr_def *, HARD_REG_SET *);
static void btr_def_live_range (btr_def *, HARD_REG_SET *);
static void move_btr_def (basic_block, int, btr_def *, bitmap, HARD_REG_SET *);
static int migrate_btr_def (btr_def *, int);
static void migrate_btr_defs (enum reg_class, int);
static int can_move_up (const_basic_block, const rtx_insn *, int);
static void note_btr_set (rtx, const_rtx, void *);

/* The following code performs code motion of target load instructions
   (instructions that set branch target registers), to move them
   forward away from the branch instructions and out of loops (or,
   more generally, from a more frequently executed place to a less
   frequently executed place).
   Moving target load instructions further in front of the branch
   instruction that uses the target register value means that the hardware
   has a better chance of preloading the instructions at the branch
   target by the time the branch is reached.  This avoids bubbles
   when a taken branch needs to flush out the pipeline.
   Moving target load instructions out of loops means they are executed
   less frequently.  */

/* An obstack to hold the def-use web data structures built up for
   migrating branch target load instructions.  */
static struct obstack migrate_btrl_obstack;

/* Array indexed by basic block number, giving the set of registers
   live in that block.  */
static HARD_REG_SET *btrs_live;

/* Array indexed by basic block number, giving the set of registers live at
   the end of that block, including any uses by a final jump insn, if any.  */
static HARD_REG_SET *btrs_live_at_end;

/* Set of all target registers that we are willing to allocate.  */
static HARD_REG_SET all_btrs;

/* Provide lower and upper bounds for target register numbers, so that
   we don't need to search through all the hard registers all the time.  */
static int first_btr, last_btr;



/* Return an estimate of the frequency of execution of block bb.  */
static int
basic_block_freq (const_basic_block bb)
{
  return bb->count.to_frequency (cfun);
}

/* If the rtx at *XP references (sets or reads) any branch target
   register, return a pointer to one such reference.  If EXCLUDEP is
   set, disregard any references within that location.  */
static rtx *
find_btr_use (rtx *xp, rtx *excludep = 0)
{
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, xp, NONCONST)
    {
      rtx *loc = *iter;
      if (loc == excludep)
	iter.skip_subrtxes ();
      else
	{
	  const_rtx x = *loc;
	  if (REG_P (x)
	      && overlaps_hard_reg_set_p (all_btrs, GET_MODE (x), REGNO (x)))
	    return loc;
	}
    }
  return 0;
}

/* Return true if INSN is an instruction that sets a target register.
   If CHECK_CONST is true, only return true if the source is constant.
   If such a set is found and REGNO is non-null, assign the register number
   of the destination register to *REGNO.  */
static int
insn_sets_btr_p (const rtx_insn *insn, int check_const, int *regno)
{
  rtx set;

  if (NONJUMP_INSN_P (insn)
      && (set = single_set (insn)))
    {
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);

      if (GET_CODE (dest) == SUBREG)
	dest = XEXP (dest, 0);

      if (REG_P (dest)
	  && TEST_HARD_REG_BIT (all_btrs, REGNO (dest)))
	{
	  gcc_assert (!find_btr_use (&src));

	  if (!check_const || CONSTANT_P (src))
	    {
	      if (regno)
		*regno = REGNO (dest);
	      return 1;
	    }
	}
    }
  return 0;
}

/* Find the group that the target register definition DEF belongs
   to in the list starting with *ALL_BTR_DEF_GROUPS.  If no such
   group exists, create one.  Add def to the group.  */
static void
find_btr_def_group (btr_def_group **all_btr_def_groups, btr_def *def)
{
  if (insn_sets_btr_p (def->insn, 1, NULL))
    {
      btr_def_group *this_group;
      rtx def_src = SET_SRC (single_set (def->insn));

      /* ?? This linear search is an efficiency concern, particularly
	 as the search will almost always fail to find a match.  */
      for (this_group = *all_btr_def_groups;
	   this_group != NULL;
	   this_group = this_group->next)
	if (rtx_equal_p (def_src, this_group->src))
	  break;

      if (!this_group)
	{
	  this_group = XOBNEW (&migrate_btrl_obstack, btr_def_group);
	  this_group->src = def_src;
	  this_group->members = NULL;
	  this_group->next = *all_btr_def_groups;
	  *all_btr_def_groups = this_group;
	}
      def->group = this_group;
      def->next_this_group = this_group->members;
      this_group->members = def;
    }
  else
    def->group = NULL;
}

/* Create a new target register definition structure, for a definition in
   block BB, instruction INSN, and insert it into ALL_BTR_DEFS.  Return
   the new definition.  */
static btr_def *
add_btr_def (btr_heap_t *all_btr_defs, basic_block bb, int insn_luid,
	     rtx_insn *insn,
	     unsigned int dest_reg, int other_btr_uses_before_def,
	     btr_def_group **all_btr_def_groups)
{
  btr_def *this_def = XOBNEW (&migrate_btrl_obstack, btr_def);
  this_def->bb = bb;
  this_def->luid = insn_luid;
  this_def->insn = insn;
  this_def->btr = dest_reg;
  this_def->cost = basic_block_freq (bb);
  this_def->has_ambiguous_use = 0;
  this_def->other_btr_uses_before_def = other_btr_uses_before_def;
  this_def->other_btr_uses_after_use = 0;
  this_def->next_this_bb = NULL;
  this_def->next_this_group = NULL;
  this_def->uses = NULL;
  this_def->live_range = NULL;
  find_btr_def_group (all_btr_def_groups, this_def);

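  /* ALL_BTR_DEFS is a min-heap, so insert with the negated cost; defs
     in the most frequently executed blocks are then extracted first.  */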
  all_btr_defs->insert (-this_def->cost, this_def);

  if (dump_file)
    fprintf (dump_file,
      "Found target reg definition: sets %u { bb %d, insn %d }%s priority %d\n",
	     dest_reg, bb->index, INSN_UID (insn),
	     (this_def->group ? "" : ":not const"), this_def->cost);

  return this_def;
}

/* Create a new target register user structure, for a use in block BB,
   instruction INSN.  Return the new user.  */
static btr_user *
new_btr_user (basic_block bb, int insn_luid, rtx_insn *insn)
{
  /* This instruction reads target registers.  We need
     to decide whether we can replace all target register
     uses easily.  */
  rtx *usep = find_btr_use (&PATTERN (insn));
  rtx use;
  btr_user *user = NULL;

  if (usep)
    {
      int unambiguous_single_use;

      /* We want to ensure that USE is the only use of a target
	 register in INSN, so that we know that to rewrite INSN to use
	 a different target register, all we have to do is replace USE.  */
      unambiguous_single_use = !find_btr_use (&PATTERN (insn), usep);
      if (!unambiguous_single_use)
	usep = NULL;
    }
  use = usep ? *usep : NULL_RTX;
  user = XOBNEW (&migrate_btrl_obstack, btr_user);
  user->bb = bb;
  user->luid = insn_luid;
  user->insn = insn;
  user->use = use;
  user->other_use_this_block = 0;
  user->next = NULL;
  user->n_reaching_defs = 0;
  user->first_reaching_def = -1;

  if (dump_file)
    {
      fprintf (dump_file, "Uses target reg: { bb %d, insn %d }",
	       bb->index, INSN_UID (insn));

      if (user->use)
	fprintf (dump_file, ": unambiguous use of reg %d\n",
		 REGNO (user->use));
    }

  return user;
}

/* Write the contents of S to the dump file.  */
static void
dump_hard_reg_set (HARD_REG_SET s)
{
  int reg;
  for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
    if (TEST_HARD_REG_BIT (s, reg))
      fprintf (dump_file, " %d", reg);
}

/* Write the set of target regs live in block BB to the dump file.  */
static void
dump_btrs_live (int bb)
{
  fprintf (dump_file, "BB%d live:", bb);
  dump_hard_reg_set (btrs_live[bb]);
  fprintf (dump_file, "\n");
}

/* REGNO is the number of a branch target register that is being used or
   set.  USERS_THIS_BB is a list of preceding branch target register users;
   if any of them use the same register, set their other_use_this_block
   flag.  */
static void
note_other_use_this_block (unsigned int regno, btr_user *users_this_bb)
{
  btr_user *user;

  for (user = users_this_bb; user != NULL; user = user->next)
    if (user->use && REGNO (user->use) == regno)
      user->other_use_this_block = 1;
}

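/* Per-basic-block working state shared between compute_defs_uses_and_gen
   and its note_stores callback note_btr_set: the users seen so far in the
   current block, the target registers written and live in it, and the
   block's GEN set as it is being built.  */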
struct defs_uses_info {
  btr_user *users_this_bb;
  HARD_REG_SET btrs_written_in_block;
  HARD_REG_SET btrs_live_in_block;
  sbitmap bb_gen;
  sbitmap *btr_defset;
};

/* Called via note_stores or directly to register stores into /
   clobbers of a branch target register DEST that are not recognized as
   straightforward definitions.  DATA points to information about the
   current basic block that needs updating.  */
static void
note_btr_set (rtx dest, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
  defs_uses_info *info = (defs_uses_info *) data;
  int regno, end_regno;

  if (!REG_P (dest))
    return;
  regno = REGNO (dest);
  end_regno = END_REGNO (dest);
  for (; regno < end_regno; regno++)
    if (TEST_HARD_REG_BIT (all_btrs, regno))
      {
	note_other_use_this_block (regno, info->users_this_bb);
	SET_HARD_REG_BIT (info->btrs_written_in_block, regno);
	SET_HARD_REG_BIT (info->btrs_live_in_block, regno);
	bitmap_and_compl (info->bb_gen, info->bb_gen,
			    info->btr_defset[regno - first_btr]);
      }
}

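/* Scan all insns in the function, filling in DEF_ARRAY and USE_ARRAY
   (both indexed by insn uid), the per-register def sets BTR_DEFSET,
   the per-block GEN sets BB_GEN, and the set of target registers
   written in each block, BTRS_WRITTEN.  Newly found defs also go into
   ALL_BTR_DEFS.  As a side effect, btrs_live and btrs_live_at_end are
   filled in for every basic block.  */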
static void
compute_defs_uses_and_gen (btr_heap_t *all_btr_defs, btr_def **def_array,
			   btr_user **use_array, sbitmap *btr_defset,
			   sbitmap *bb_gen, HARD_REG_SET *btrs_written)
{
  /* Scan the code building up the set of all defs and all uses.
     For each target register, build the set of defs of that register.
     For each block, calculate the set of target registers
     written in that block.
     Also calculate the set of btrs ever live in that block.  */
  int i;
  int insn_luid = 0;
  btr_def_group *all_btr_def_groups = NULL;
  defs_uses_info info;

  bitmap_vector_clear (bb_gen, last_basic_block_for_fn (cfun));
  for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      int reg;
      btr_def *defs_this_bb = NULL;
      rtx_insn *insn;
      rtx_insn *last;
      int can_throw = 0;

      info.users_this_bb = NULL;
      info.bb_gen = bb_gen[i];
      info.btr_defset = btr_defset;

      CLEAR_HARD_REG_SET (info.btrs_live_in_block);
      CLEAR_HARD_REG_SET (info.btrs_written_in_block);
      for (reg = first_btr; reg <= last_btr; reg++)
	if (TEST_HARD_REG_BIT (all_btrs, reg)
	    && REGNO_REG_SET_P (df_get_live_in (bb), reg))
	  SET_HARD_REG_BIT (info.btrs_live_in_block, reg);

      for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
	   insn != last;
	   insn = NEXT_INSN (insn), insn_luid++)
	{
	  if (INSN_P (insn))
	    {
	      int regno;
	      int insn_uid = INSN_UID (insn);

	      if (insn_sets_btr_p (insn, 0, &regno))
		{
		  btr_def *def = add_btr_def (
		      all_btr_defs, bb, insn_luid, insn, regno,
		      TEST_HARD_REG_BIT (info.btrs_live_in_block, regno),
		      &all_btr_def_groups);

		  def_array[insn_uid] = def;
		  SET_HARD_REG_BIT (info.btrs_written_in_block, regno);
		  SET_HARD_REG_BIT (info.btrs_live_in_block, regno);
		  bitmap_and_compl (bb_gen[i], bb_gen[i],
				      btr_defset[regno - first_btr]);
		  bitmap_set_bit (bb_gen[i], insn_uid);
		  def->next_this_bb = defs_this_bb;
		  defs_this_bb = def;
		  bitmap_set_bit (btr_defset[regno - first_btr], insn_uid);
		  note_other_use_this_block (regno, info.users_this_bb);
		}
	      /* Check for the blockage emitted by expand_nl_goto_receiver.  */
	      else if (cfun->has_nonlocal_label
		       && GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE)
		{
		  btr_user *user;

		  /* Do the equivalent of calling note_other_use_this_block
		     for every target register.  */
		  for (user = info.users_this_bb; user != NULL;
		       user = user->next)
		    if (user->use)
		      user->other_use_this_block = 1;
		  IOR_HARD_REG_SET (info.btrs_written_in_block, all_btrs);
		  IOR_HARD_REG_SET (info.btrs_live_in_block, all_btrs);
		  bitmap_clear (info.bb_gen);
		}
	      else
		{
		  if (find_btr_use (&PATTERN (insn)))
		    {
		      btr_user *user = new_btr_user (bb, insn_luid, insn);

		      use_array[insn_uid] = user;
		      if (user->use)
			SET_HARD_REG_BIT (info.btrs_live_in_block,
					  REGNO (user->use));
		      else
			{
			  int reg;
			  for (reg = first_btr; reg <= last_btr; reg++)
			    if (TEST_HARD_REG_BIT (all_btrs, reg)
				&& refers_to_regno_p (reg, user->insn))
			      {
				note_other_use_this_block (reg,
							   info.users_this_bb);
				SET_HARD_REG_BIT (info.btrs_live_in_block, reg);
			      }
			  note_stores (PATTERN (insn), note_btr_set, &info);
			}
		      user->next = info.users_this_bb;
		      info.users_this_bb = user;
		    }
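		  /* A call clobbers the registers in *CLOBBERED below
		     (the call-used set, or its complement for a sibcall);
		     record any target registers among them as set.  */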
		  if (CALL_P (insn))
		    {
		      HARD_REG_SET *clobbered = &call_used_reg_set;
		      HARD_REG_SET call_saved;
		      rtx pat = PATTERN (insn);
		      int i;

		      /* Check for sibcall.  */
		      if (GET_CODE (pat) == PARALLEL)
			for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
			  if (ANY_RETURN_P (XVECEXP (pat, 0, i)))
			    {
			      COMPL_HARD_REG_SET (call_saved,
						  call_used_reg_set);
			      clobbered = &call_saved;
			    }

		      for (regno = first_btr; regno <= last_btr; regno++)
			if (TEST_HARD_REG_BIT (*clobbered, regno))
			  note_btr_set (regno_reg_rtx[regno], NULL_RTX, &info);
		    }
		}
	    }
	}

      COPY_HARD_REG_SET (btrs_live[i], info.btrs_live_in_block);
      COPY_HARD_REG_SET (btrs_written[i], info.btrs_written_in_block);

      REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], df_get_live_out (bb));
      /* If this block ends in a jump insn, add any uses or even clobbers
	 of branch target registers that it might have.  */
      for (insn = BB_END (bb); insn != BB_HEAD (bb) && ! INSN_P (insn); )
	insn = PREV_INSN (insn);
      /* ??? For the fall-through edge, it would make sense to insert the
	 btr set on the edge, but that would require splitting the block
	 early on so that we can distinguish dominance from the fall-through
	 edge - which can use the call-clobbered registers - from
	 dominance by the throw edge.  */
      if (can_throw_internal (insn))
	{
	  HARD_REG_SET tmp;

	  COPY_HARD_REG_SET (tmp, call_used_reg_set);
	  AND_HARD_REG_SET (tmp, all_btrs);
	  IOR_HARD_REG_SET (btrs_live_at_end[i], tmp);
	  can_throw = 1;
	}
      if (can_throw || JUMP_P (insn))
	{
	  int regno;

	  for (regno = first_btr; regno <= last_btr; regno++)
	    if (refers_to_regno_p (regno, insn))
	      SET_HARD_REG_BIT (btrs_live_at_end[i], regno);
	}

      if (dump_file)
	dump_btrs_live (i);
    }
}

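/* For each basic block, compute in BB_KILL[i] the set of target register
   definitions killed by block I: all defs (taken from BTR_DEFSET) of any
   target register that the block writes (per BTRS_WRITTEN).  */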
static void
compute_kill (sbitmap *bb_kill, sbitmap *btr_defset,
	      HARD_REG_SET *btrs_written)
{
  int i;
  int regno;

  /* For each basic block, form the set BB_KILL - the set
     of definitions that the block kills.  */
  bitmap_vector_clear (bb_kill, last_basic_block_for_fn (cfun));
  for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
    {
      for (regno = first_btr; regno <= last_btr; regno++)
	if (TEST_HARD_REG_BIT (all_btrs, regno)
	    && TEST_HARD_REG_BIT (btrs_written[i], regno))
	  bitmap_ior (bb_kill[i], bb_kill[i],
			  btr_defset[regno - first_btr]);
    }
}

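/* Solve the reaching-definitions problem for target register defs:
   compute BB_OUT for every block from BB_GEN and BB_KILL by iterating
   to a fixed point.  MAX_UID bounds the size of the per-block bitmaps
   (one bit per insn uid).  */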
static void
compute_out (sbitmap *bb_out, sbitmap *bb_gen, sbitmap *bb_kill, int max_uid)
{
  /* Perform iterative dataflow:
      Initially, for all blocks, BB_OUT = BB_GEN.
      For each block,
	BB_IN  = union over predecessors of BB_OUT(pred)
	BB_OUT = (BB_IN - BB_KILL) + BB_GEN
     Iterate until the bb_out sets stop growing.  */
  int i;
  int changed;
  auto_sbitmap bb_in (max_uid);

  for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
    bitmap_copy (bb_out[i], bb_gen[i]);

  changed = 1;
  while (changed)
    {
      changed = 0;
      for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
	{
	  bitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK_FOR_FN (cfun, i));
	  changed |= bitmap_ior_and_compl (bb_out[i], bb_gen[i],
					       bb_in, bb_kill[i]);
	}
    }
}

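/* Using the per-block reaching-definition sets BB_OUT, link every target
   register use in USE_ARRAY to the uses list of each def in DEF_ARRAY
   that reaches it, and flag defs whose webs are ambiguous (more than one
   reaching def, or a use we cannot rewrite).  */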
static void
link_btr_uses (btr_def **def_array, btr_user **use_array, sbitmap *bb_out,
	       sbitmap *btr_defset, int max_uid)
{
  int i;
  auto_sbitmap reaching_defs (max_uid);

  /* Link uses to the uses lists of all of their reaching defs.
     Count up the number of reaching defs of each use.  */
  for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *last;

      bitmap_union_of_preds (reaching_defs, bb_out, BASIC_BLOCK_FOR_FN (cfun, i));
      for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
	   insn != last;
	   insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      int insn_uid = INSN_UID (insn);

	      btr_def *def   = def_array[insn_uid];
	      btr_user *user = use_array[insn_uid];
	      if (def != NULL)
		{
		  /* Remove all reaching defs of regno except
		     for this one.  */
		  bitmap_and_compl (reaching_defs, reaching_defs,
				      btr_defset[def->btr - first_btr]);
		  bitmap_set_bit (reaching_defs, insn_uid);
		}

	      if (user != NULL)
		{
		  /* Find all the reaching defs for this use.  */
		  auto_sbitmap reaching_defs_of_reg (max_uid);
		  unsigned int uid = 0;
		  sbitmap_iterator sbi;

		  if (user->use)
		    bitmap_and (
		      reaching_defs_of_reg,
		      reaching_defs,
		      btr_defset[REGNO (user->use) - first_btr]);
		  else
		    {
		      int reg;

		      bitmap_clear (reaching_defs_of_reg);
		      for (reg = first_btr; reg <= last_btr; reg++)
			if (TEST_HARD_REG_BIT (all_btrs, reg)
			    && refers_to_regno_p (reg, user->insn))
			  bitmap_or_and (reaching_defs_of_reg,
			    reaching_defs_of_reg,
			    reaching_defs,
			    btr_defset[reg - first_btr]);
		    }
		  EXECUTE_IF_SET_IN_BITMAP (reaching_defs_of_reg, 0, uid, sbi)
		    {
		      btr_def *def = def_array[uid];

		      /* We now know that def reaches user.  */

		      if (dump_file)
			fprintf (dump_file,
			  "Def in insn %d reaches use in insn %d\n",
			  uid, insn_uid);

		      user->n_reaching_defs++;
		      if (!user->use)
			def->has_ambiguous_use = 1;
		      if (user->first_reaching_def != -1)
			{ /* There is more than one reaching def.  This is
			     a rare case, so just give up on this def/use
			     web when it occurs.  */
			  def->has_ambiguous_use = 1;
			  def_array[user->first_reaching_def]
			    ->has_ambiguous_use = 1;
			  if (dump_file)
			    fprintf (dump_file,
				     "(use %d has multiple reaching defs)\n",
				     insn_uid);
			}
		      else
			user->first_reaching_def = uid;
		      if (user->other_use_this_block)
			def->other_btr_uses_after_use = 1;
		      user->next = def->uses;
		      def->uses = user;
		    }
		}

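	      /* A call clobbers the call-used target registers, so their
		 defs do not reach beyond it.  */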
	      if (CALL_P (insn))
		{
		  int regno;

		  for (regno = first_btr; regno <= last_btr; regno++)
		    if (TEST_HARD_REG_BIT (all_btrs, regno)
			&& TEST_HARD_REG_BIT (call_used_reg_set, regno))
		      bitmap_and_compl (reaching_defs, reaching_defs,
					  btr_defset[regno - first_btr]);
		}
	    }
	}
    }
}

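/* Build the def/use webs for all branch target registers: collect the
   defs and uses, run the reaching-definitions dataflow over them, and
   link each use to its reaching defs.  All defs are added to
   ALL_BTR_DEFS.  */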
static void
build_btr_def_use_webs (btr_heap_t *all_btr_defs)
{
  const int max_uid = get_max_uid ();
  btr_def  **def_array   = XCNEWVEC (btr_def *, max_uid);
  btr_user **use_array   = XCNEWVEC (btr_user *, max_uid);
  sbitmap *btr_defset   = sbitmap_vector_alloc (
			   (last_btr - first_btr) + 1, max_uid);
  sbitmap *bb_gen = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
					  max_uid);
  HARD_REG_SET *btrs_written = XCNEWVEC (HARD_REG_SET,
					 last_basic_block_for_fn (cfun));
  sbitmap *bb_kill;
  sbitmap *bb_out;

  bitmap_vector_clear (btr_defset, (last_btr - first_btr) + 1);

  compute_defs_uses_and_gen (all_btr_defs, def_array, use_array, btr_defset,
			     bb_gen, btrs_written);

  bb_kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), max_uid);
  compute_kill (bb_kill, btr_defset, btrs_written);
  free (btrs_written);

  bb_out = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), max_uid);
  compute_out (bb_out, bb_gen, bb_kill, max_uid);

  sbitmap_vector_free (bb_gen);
  sbitmap_vector_free (bb_kill);

  link_btr_uses (def_array, use_array, bb_out, btr_defset, max_uid);

  sbitmap_vector_free (bb_out);
  sbitmap_vector_free (btr_defset);
  free (use_array);
  free (def_array);
}

/* Return true if basic block BB contains the start or end of the
   live range of the definition DEF, AND there are other live
   ranges of the same target register that include BB.  */
static int
block_at_edge_of_live_range_p (int bb, btr_def *def)
{
  if (def->other_btr_uses_before_def
      && BASIC_BLOCK_FOR_FN (cfun, bb) == def->bb)
    return 1;
  else if (def->other_btr_uses_after_use)
    {
      btr_user *user;
      for (user = def->uses; user != NULL; user = user->next)
	if (BASIC_BLOCK_FOR_FN (cfun, bb) == user->bb)
	  return 1;
    }
  return 0;
}

/* We are removing the def/use web DEF.  The target register
   used in this web is therefore no longer live in the live range
   of this web, so remove it from the live set of all basic blocks
   in the live range of the web.
   Blocks at the boundary of the live range may contain other live
   ranges for the same target register, so we have to be careful
   to remove the target register from the live set of these blocks
   only if they do not contain other live ranges for the same register.  */
static void
clear_btr_from_live_range (btr_def *def)
{
  unsigned bb;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
    {
      if ((!def->other_btr_uses_before_def
	   && !def->other_btr_uses_after_use)
	  || !block_at_edge_of_live_range_p (bb, def))
	{
	  CLEAR_HARD_REG_BIT (btrs_live[bb], def->btr);
	  CLEAR_HARD_REG_BIT (btrs_live_at_end[bb], def->btr);
	  if (dump_file)
	    dump_btrs_live (bb);
	}
    }
  if (def->own_end)
    CLEAR_HARD_REG_BIT (btrs_live_at_end[def->bb->index], def->btr);
}


/* We are adding the def/use web DEF.  Add the target register used
   in this web to the live set of all of the basic blocks that contain
   the live range of the web.
   If OWN_END is set, also show that the register is live from our
   definitions at the end of the basic block where it is defined.  */
static void
add_btr_to_live_range (btr_def *def, int own_end)
{
  unsigned bb;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
    {
      SET_HARD_REG_BIT (btrs_live[bb], def->btr);
      SET_HARD_REG_BIT (btrs_live_at_end[bb], def->btr);
      if (dump_file)
	dump_btrs_live (bb);
    }
  if (own_end)
    {
      SET_HARD_REG_BIT (btrs_live_at_end[def->bb->index], def->btr);
      def->own_end = 1;
    }
}

/* Update a live range to contain the basic block NEW_BB, and all
   blocks on paths between the existing live range and NEW_BB.
   HEAD_BB is a block contained in the existing live range that dominates
   all other blocks in the existing live range.
   Also add to the set BTRS_LIVE_IN_RANGE all target registers that
   are live in the blocks that we add to the live range.
   If FULL_RANGE is set, include the full live range of NEW_BB;
   otherwise, if NEW_BB dominates HEAD_BB, only add registers that
   are live at the end of NEW_BB for NEW_BB itself.
   It is a precondition that either NEW_BB dominates HEAD_BB, or
   HEAD_BB dominates NEW_BB.  This is used to speed up the
   implementation of this function.  */
static void
augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
		    basic_block head_bb, basic_block new_bb, int full_range)
{
  basic_block *worklist, *tos;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);

  if (dominated_by_p (CDI_DOMINATORS, new_bb, head_bb))
    {
      if (new_bb == head_bb)
	{
	  if (full_range)
	    IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_bb->index]);
	  free (tos);
	  return;
	}
      *tos++ = new_bb;
    }
  else
    {
      edge e;
      edge_iterator ei;
      int new_block = new_bb->index;

      gcc_assert (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb));

      IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[head_bb->index]);
      bitmap_set_bit (live_range, new_block);
      /* A previous btr migration could have caused a register to be
	 live just at the end of new_block, which we need in full, so
	 use btrs_live_at_end even if full_range is set.  */
      IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live_at_end[new_block]);
      if (full_range)
	IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_block]);
      if (dump_file)
	{
	  fprintf (dump_file,
		   "Adding end of block %d and rest of %d to live range\n",
		   new_block, head_bb->index);
	  fprintf (dump_file, "Now live btrs are ");
	  dump_hard_reg_set (*btrs_live_in_range);
	  fprintf (dump_file, "\n");
	}
      FOR_EACH_EDGE (e, ei, head_bb->preds)
	*tos++ = e->src;
    }

  while (tos != worklist)
    {
      basic_block bb = *--tos;
      if (!bitmap_bit_p (live_range, bb->index))
	{
	  edge e;
	  edge_iterator ei;

	  bitmap_set_bit (live_range, bb->index);
	  IOR_HARD_REG_SET (*btrs_live_in_range,
	    btrs_live[bb->index]);
	  /* A previous btr migration could have caused a register to be
	     live just at the end of a block which we need in full.  */
	  IOR_HARD_REG_SET (*btrs_live_in_range,
	    btrs_live_at_end[bb->index]);
	  if (dump_file)
	    {
	      fprintf (dump_file,
		"Adding block %d to live range\n", bb->index);
	      fprintf (dump_file, "Now live btrs are ");
	      dump_hard_reg_set (*btrs_live_in_range);
	      fprintf (dump_file, "\n");
	    }

	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      basic_block pred = e->src;
	      if (!bitmap_bit_p (live_range, pred->index))
		*tos++ = pred;
	    }
	}
    }

  free (worklist);
}

/* Return the most desirable target register that is not in
   the set USED_BTRS.  */
static int
choose_btr (HARD_REG_SET used_btrs)
{
  int i;

  if (!hard_reg_set_subset_p (all_btrs, used_btrs))
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      {
#ifdef REG_ALLOC_ORDER
	int regno = reg_alloc_order[i];
#else
	int regno = i;
#endif
	if (TEST_HARD_REG_BIT (all_btrs, regno)
	    && !TEST_HARD_REG_BIT (used_btrs, regno))
	  return regno;
      }
  return -1;
}

/* Calculate the set of basic blocks that contain the live range of
   the def/use web DEF.
   Also calculate the set of target registers that are live at any time
   in this live range, but ignore the live range represented by DEF
   when calculating this set.  */
994*38fd1498Szrj static void
btr_def_live_range(btr_def * def,HARD_REG_SET * btrs_live_in_range)995*38fd1498Szrj btr_def_live_range (btr_def *def, HARD_REG_SET *btrs_live_in_range)
996*38fd1498Szrj {
997*38fd1498Szrj   if (!def->live_range)
998*38fd1498Szrj     {
999*38fd1498Szrj       btr_user *user;
1000*38fd1498Szrj 
1001*38fd1498Szrj       def->live_range = BITMAP_ALLOC (NULL);
1002*38fd1498Szrj 
1003*38fd1498Szrj       bitmap_set_bit (def->live_range, def->bb->index);
1004*38fd1498Szrj       COPY_HARD_REG_SET (*btrs_live_in_range,
1005*38fd1498Szrj 			 (flag_btr_bb_exclusive
1006*38fd1498Szrj 			  ? btrs_live : btrs_live_at_end)[def->bb->index]);
1007*38fd1498Szrj 
1008*38fd1498Szrj       for (user = def->uses; user != NULL; user = user->next)
1009*38fd1498Szrj 	augment_live_range (def->live_range, btrs_live_in_range,
1010*38fd1498Szrj 			    def->bb, user->bb,
1011*38fd1498Szrj 			    (flag_btr_bb_exclusive
1012*38fd1498Szrj 			     || user->insn != BB_END (def->bb)
1013*38fd1498Szrj 			     || !JUMP_P (user->insn)));
1014*38fd1498Szrj     }
1015*38fd1498Szrj   else
1016*38fd1498Szrj     {
1017*38fd1498Szrj       /* def->live_range is accurate, but we need to recompute
1018*38fd1498Szrj 	 the set of target registers live over it, because migration
1019*38fd1498Szrj 	 of other PT instructions may have affected it.
1020*38fd1498Szrj       */
1021*38fd1498Szrj       unsigned bb;
1022*38fd1498Szrj       unsigned def_bb = flag_btr_bb_exclusive ? -1 : def->bb->index;
1023*38fd1498Szrj       bitmap_iterator bi;
1024*38fd1498Szrj 
1025*38fd1498Szrj       CLEAR_HARD_REG_SET (*btrs_live_in_range);
1026*38fd1498Szrj       EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
1027*38fd1498Szrj 	{
1028*38fd1498Szrj 	  IOR_HARD_REG_SET (*btrs_live_in_range,
1029*38fd1498Szrj 			    (def_bb == bb
1030*38fd1498Szrj 			     ? btrs_live_at_end : btrs_live) [bb]);
1031*38fd1498Szrj 	}
1032*38fd1498Szrj     }
1033*38fd1498Szrj   if (!def->other_btr_uses_before_def &&
1034*38fd1498Szrj       !def->other_btr_uses_after_use)
1035*38fd1498Szrj     CLEAR_HARD_REG_BIT (*btrs_live_in_range, def->btr);
1036*38fd1498Szrj }
1037*38fd1498Szrj 
1038*38fd1498Szrj /* Merge into the def/use web DEF any other def/use webs in the same
1039*38fd1498Szrj    group that are dominated by DEF, provided that there is a target
1040*38fd1498Szrj    register available to allocate to the merged web.  */
1041*38fd1498Szrj static void
combine_btr_defs(btr_def * def,HARD_REG_SET * btrs_live_in_range)1042*38fd1498Szrj combine_btr_defs (btr_def *def, HARD_REG_SET *btrs_live_in_range)
1043*38fd1498Szrj {
1044*38fd1498Szrj   btr_def *other_def;
1045*38fd1498Szrj 
1046*38fd1498Szrj   for (other_def = def->group->members;
1047*38fd1498Szrj        other_def != NULL;
1048*38fd1498Szrj        other_def = other_def->next_this_group)
1049*38fd1498Szrj     {
1050*38fd1498Szrj       if (other_def != def
1051*38fd1498Szrj 	  && other_def->uses != NULL
1052*38fd1498Szrj 	  && ! other_def->has_ambiguous_use
1053*38fd1498Szrj 	  && dominated_by_p (CDI_DOMINATORS, other_def->bb, def->bb))
1054*38fd1498Szrj 	{
1055*38fd1498Szrj 	  /* def->bb dominates the other def, so def and other_def could
1056*38fd1498Szrj 	     be combined.  */
1057*38fd1498Szrj 	  /* Merge their live ranges, and get the set of
1058*38fd1498Szrj 	     target registers live over the merged range.  */
1059*38fd1498Szrj 	  int btr;
1060*38fd1498Szrj 	  HARD_REG_SET combined_btrs_live;
1061*38fd1498Szrj 	  auto_bitmap combined_live_range;
1062*38fd1498Szrj 	  btr_user *user;
1063*38fd1498Szrj 
1064*38fd1498Szrj 	  if (other_def->live_range == NULL)
1065*38fd1498Szrj 	    {
1066*38fd1498Szrj 	      HARD_REG_SET dummy_btrs_live_in_range;
1067*38fd1498Szrj 	      btr_def_live_range (other_def, &dummy_btrs_live_in_range);
1068*38fd1498Szrj 	    }
1069*38fd1498Szrj 	  COPY_HARD_REG_SET (combined_btrs_live, *btrs_live_in_range);
1070*38fd1498Szrj 	  bitmap_copy (combined_live_range, def->live_range);
1071*38fd1498Szrj 
1072*38fd1498Szrj 	  for (user = other_def->uses; user != NULL; user = user->next)
1073*38fd1498Szrj 	    augment_live_range (combined_live_range, &combined_btrs_live,
1074*38fd1498Szrj 				def->bb, user->bb,
1075*38fd1498Szrj 				(flag_btr_bb_exclusive
1076*38fd1498Szrj 				 || user->insn != BB_END (def->bb)
1077*38fd1498Szrj 				 || !JUMP_P (user->insn)));
1078*38fd1498Szrj 
1079*38fd1498Szrj 	  btr = choose_btr (combined_btrs_live);
1080*38fd1498Szrj 	  if (btr != -1)
1081*38fd1498Szrj 	    {
1082*38fd1498Szrj 	      /* We can combine them.  */
1083*38fd1498Szrj 	      if (dump_file)
1084*38fd1498Szrj 		fprintf (dump_file,
1085*38fd1498Szrj 			 "Combining def in insn %d with def in insn %d\n",
1086*38fd1498Szrj 			 INSN_UID (other_def->insn), INSN_UID (def->insn));
1087*38fd1498Szrj 
1088*38fd1498Szrj 	      def->btr = btr;
1089*38fd1498Szrj 	      user = other_def->uses;
1090*38fd1498Szrj 	      while (user != NULL)
1091*38fd1498Szrj 		{
1092*38fd1498Szrj 		  btr_user *next = user->next;
1093*38fd1498Szrj 
1094*38fd1498Szrj 		  user->next = def->uses;
1095*38fd1498Szrj 		  def->uses = user;
1096*38fd1498Szrj 		  user = next;
1097*38fd1498Szrj 		}
1098*38fd1498Szrj 	      /* Combining def/use webs can make target registers live
1099*38fd1498Szrj 		 after uses where they previously were not.  This means
1100*38fd1498Szrj 		 some REG_DEAD notes may no longer be correct.  We could
1101*38fd1498Szrj 		 be more precise about this if we looked at the combined
1102*38fd1498Szrj 		 live range, but here I just delete any REG_DEAD notes
1103*38fd1498Szrj 		 in case they are no longer correct.  */
1104*38fd1498Szrj 	      for (user = def->uses; user != NULL; user = user->next)
1105*38fd1498Szrj 		remove_note (user->insn,
1106*38fd1498Szrj 			     find_regno_note (user->insn, REG_DEAD,
1107*38fd1498Szrj 					      REGNO (user->use)));
1108*38fd1498Szrj 	      clear_btr_from_live_range (other_def);
1109*38fd1498Szrj 	      other_def->uses = NULL;
1110*38fd1498Szrj 	      bitmap_copy (def->live_range, combined_live_range);
1111*38fd1498Szrj 	      if (other_def->btr == btr && other_def->other_btr_uses_after_use)
1112*38fd1498Szrj 		def->other_btr_uses_after_use = 1;
1113*38fd1498Szrj 	      COPY_HARD_REG_SET (*btrs_live_in_range, combined_btrs_live);
1114*38fd1498Szrj 
1115*38fd1498Szrj 	      /* Delete the old target register initialization.  */
1116*38fd1498Szrj 	      delete_insn (other_def->insn);
1117*38fd1498Szrj 
1118*38fd1498Szrj 	    }
1119*38fd1498Szrj 	}
1120*38fd1498Szrj     }
1121*38fd1498Szrj }
1122*38fd1498Szrj 
1123*38fd1498Szrj /* Move the definition DEF from its current position to basic
1124*38fd1498Szrj    block NEW_DEF_BB, and modify it to use branch target register BTR.
1125*38fd1498Szrj    Delete the old defining insn, and insert a new one in NEW_DEF_BB.
1126*38fd1498Szrj    Update all reaching uses of DEF in the RTL to use BTR.
1127*38fd1498Szrj    If this new position means that other defs in the
1128*38fd1498Szrj    same group can be combined with DEF then combine them.  */
1129*38fd1498Szrj static void
move_btr_def(basic_block new_def_bb,int btr,btr_def * def,bitmap live_range,HARD_REG_SET * btrs_live_in_range)1130*38fd1498Szrj move_btr_def (basic_block new_def_bb, int btr, btr_def *def, bitmap live_range,
1131*38fd1498Szrj 	     HARD_REG_SET *btrs_live_in_range)
1132*38fd1498Szrj {
1133*38fd1498Szrj   /* We can move the instruction.
1134*38fd1498Szrj      Set a target register in block NEW_DEF_BB to the value
1135*38fd1498Szrj      needed for this target register definition.
1136*38fd1498Szrj      Replace all uses of the old target register definition by
1137*38fd1498Szrj      uses of the new definition.  Delete the old definition.  */
1138*38fd1498Szrj   basic_block b = new_def_bb;
1139*38fd1498Szrj   rtx_insn *insp = BB_HEAD (b);
1140*38fd1498Szrj   rtx_insn *old_insn = def->insn;
1141*38fd1498Szrj   rtx src;
1142*38fd1498Szrj   rtx btr_rtx;
1143*38fd1498Szrj   rtx_insn *new_insn;
1144*38fd1498Szrj   machine_mode btr_mode;
1145*38fd1498Szrj   btr_user *user;
1146*38fd1498Szrj   rtx set;
1147*38fd1498Szrj 
1148*38fd1498Szrj   if (dump_file)
1149*38fd1498Szrj     fprintf(dump_file, "migrating to basic block %d, using reg %d\n",
1150*38fd1498Szrj 	    new_def_bb->index, btr);
1151*38fd1498Szrj 
1152*38fd1498Szrj   clear_btr_from_live_range (def);
1153*38fd1498Szrj   def->btr = btr;
1154*38fd1498Szrj   def->bb = new_def_bb;
1155*38fd1498Szrj   def->luid = 0;
1156*38fd1498Szrj   def->cost = basic_block_freq (new_def_bb);
1157*38fd1498Szrj   bitmap_copy (def->live_range, live_range);
1158*38fd1498Szrj   combine_btr_defs (def, btrs_live_in_range);
1159*38fd1498Szrj   btr = def->btr;
1160*38fd1498Szrj   def->other_btr_uses_before_def
1161*38fd1498Szrj     = TEST_HARD_REG_BIT (btrs_live[b->index], btr) ? 1 : 0;
1162*38fd1498Szrj   add_btr_to_live_range (def, 1);
1163*38fd1498Szrj   if (LABEL_P (insp))
1164*38fd1498Szrj     insp = NEXT_INSN (insp);
1165*38fd1498Szrj   /* N.B.: insp is expected to be NOTE_INSN_BASIC_BLOCK now.  Some
1166*38fd1498Szrj      optimizations can result in insp being both first and last insn of
1167*38fd1498Szrj      its basic block.  */
1168*38fd1498Szrj   /* ?? some assertions to check that insp is sensible? */
1169*38fd1498Szrj 
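  /* DEF->other_btr_uses_before_def means the chosen register is already
     live at the head of NEW_DEF_BB, so the new load must instead go at
     the end of the block, before any final jump or insn that can throw.  */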
1170*38fd1498Szrj   if (def->other_btr_uses_before_def)
1171*38fd1498Szrj     {
1172*38fd1498Szrj       insp = BB_END (b);
1173*38fd1498Szrj       for (insp = BB_END (b); ! INSN_P (insp); insp = PREV_INSN (insp))
1174*38fd1498Szrj 	gcc_assert (insp != BB_HEAD (b));
1175*38fd1498Szrj 
1176*38fd1498Szrj       if (JUMP_P (insp) || can_throw_internal (insp))
1177*38fd1498Szrj 	insp = PREV_INSN (insp);
1178*38fd1498Szrj     }
1179*38fd1498Szrj 
1180*38fd1498Szrj   set = single_set (old_insn);
1181*38fd1498Szrj   src = SET_SRC (set);
1182*38fd1498Szrj   btr_mode = GET_MODE (SET_DEST (set));
1183*38fd1498Szrj   btr_rtx = gen_rtx_REG (btr_mode, btr);
1184*38fd1498Szrj 
1185*38fd1498Szrj   new_insn = gen_move_insn (btr_rtx, src);
1186*38fd1498Szrj 
1187*38fd1498Szrj   /* Insert target register initialization at head of basic block.  */
1188*38fd1498Szrj   def->insn = emit_insn_after (new_insn, insp);
1189*38fd1498Szrj 
1190*38fd1498Szrj   df_set_regs_ever_live (btr, true);
1191*38fd1498Szrj 
1192*38fd1498Szrj   if (dump_file)
1193*38fd1498Szrj     fprintf (dump_file, "New pt is insn %d, inserted after insn %d\n",
1194*38fd1498Szrj 	     INSN_UID (def->insn), INSN_UID (insp));
1195*38fd1498Szrj 
1196*38fd1498Szrj   /* Delete the old target register initialization.  */
1197*38fd1498Szrj   delete_insn (old_insn);
1198*38fd1498Szrj 
1199*38fd1498Szrj   /* Replace each use of the old target register by a use of the new target
1200*38fd1498Szrj      register.  */
1201*38fd1498Szrj   for (user = def->uses; user != NULL; user = user->next)
1202*38fd1498Szrj     {
1203*38fd1498Szrj       /* Some extra work here to ensure consistent modes, because
1204*38fd1498Szrj 	 it seems that a target register REG rtx can be given a different
1205*38fd1498Szrj 	 mode depending on the context (surely that should not be
1206*38fd1498Szrj 	 the case?).  */
1207*38fd1498Szrj       rtx replacement_rtx;
1208*38fd1498Szrj       if (GET_MODE (user->use) == GET_MODE (btr_rtx)
1209*38fd1498Szrj 	  || GET_MODE (user->use) == VOIDmode)
1210*38fd1498Szrj 	replacement_rtx = btr_rtx;
1211*38fd1498Szrj       else
1212*38fd1498Szrj 	replacement_rtx = gen_rtx_REG (GET_MODE (user->use), btr);
1213*38fd1498Szrj       validate_replace_rtx (user->use, replacement_rtx, user->insn);
1214*38fd1498Szrj       user->use = replacement_rtx;
1215*38fd1498Szrj     }
1216*38fd1498Szrj }
1217*38fd1498Szrj 
1218*38fd1498Szrj /* We anticipate intra-block scheduling to be done.  See if INSN could move
1219*38fd1498Szrj    up within BB by N_INSNS.  */
1220*38fd1498Szrj static int
1221*38fd1498Szrj can_move_up (const_basic_block bb, const rtx_insn *insn, int n_insns)
1222*38fd1498Szrj {
1223*38fd1498Szrj   while (insn != BB_HEAD (bb) && n_insns > 0)
1224*38fd1498Szrj     {
1225*38fd1498Szrj       insn = PREV_INSN (insn);
1226*38fd1498Szrj       /* ??? What if we have an anti-dependency that actually prevents the
1227*38fd1498Szrj 	 scheduler from doing the move?  We'd like to re-allocate the register,
1228*38fd1498Szrj 	 but not necessarily put the load into another basic block.  */
1229*38fd1498Szrj       if (INSN_P (insn))
1230*38fd1498Szrj 	n_insns--;
1231*38fd1498Szrj     }
1232*38fd1498Szrj   return n_insns <= 0;
1233*38fd1498Szrj }
1234*38fd1498Szrj 
1235*38fd1498Szrj /* Attempt to migrate the target register definition DEF to an
1236*38fd1498Szrj    earlier point in the flowgraph.
1237*38fd1498Szrj 
1238*38fd1498Szrj    It is a precondition of this function that DEF is migratable:
1239*38fd1498Szrj    i.e. it has a constant source, and all uses are unambiguous.
1240*38fd1498Szrj 
1241*38fd1498Szrj    Only migrations that reduce the cost of DEF will be made.
1242*38fd1498Szrj    MIN_COST is the lower bound on the cost of the DEF after migration.
1243*38fd1498Szrj    If we migrate DEF so that its cost falls below MIN_COST,
1244*38fd1498Szrj    then we do not attempt to migrate further.  The idea is that
1245*38fd1498Szrj    we migrate definitions in a priority order based on their cost;
1246*38fd1498Szrj    when the cost of this definition falls below MIN_COST, there is
1247*38fd1498Szrj    another definition with cost == MIN_COST which now has a higher
1248*38fd1498Szrj    priority than this definition.
1249*38fd1498Szrj 
1250*38fd1498Szrj    Return nonzero if there may be benefit from attempting to
1251*38fd1498Szrj    migrate this DEF further (i.e. we have reduced the cost below
1252*38fd1498Szrj    MIN_COST, but we may be able to reduce it further).
1253*38fd1498Szrj    Return zero if no further migration is possible.  */
1254*38fd1498Szrj static int
1255*38fd1498Szrj migrate_btr_def (btr_def *def, int min_cost)
1256*38fd1498Szrj {
1257*38fd1498Szrj   HARD_REG_SET btrs_live_in_range;
1258*38fd1498Szrj   int btr_used_near_def = 0;
1259*38fd1498Szrj   int def_basic_block_freq;
1260*38fd1498Szrj   basic_block attempt;
1261*38fd1498Szrj   int give_up = 0;
1262*38fd1498Szrj   int def_moved = 0;
1263*38fd1498Szrj   btr_user *user;
1264*38fd1498Szrj   int def_latency;
1265*38fd1498Szrj 
1266*38fd1498Szrj   if (dump_file)
1267*38fd1498Szrj     fprintf (dump_file,
1268*38fd1498Szrj 	     "Attempting to migrate pt from insn %d (cost = %d, min_cost = %d) ... ",
1269*38fd1498Szrj 	     INSN_UID (def->insn), def->cost, min_cost);
1270*38fd1498Szrj 
1271*38fd1498Szrj   if (!def->group || def->has_ambiguous_use)
1272*38fd1498Szrj     /* These defs are not migratable.  */
1273*38fd1498Szrj     {
1274*38fd1498Szrj       if (dump_file)
1275*38fd1498Szrj 	fprintf (dump_file, "it's not migratable\n");
1276*38fd1498Szrj       return 0;
1277*38fd1498Szrj     }
1278*38fd1498Szrj 
1279*38fd1498Szrj   if (!def->uses)
1280*38fd1498Szrj     /* We have combined this def with another in the same group, so
1281*38fd1498Szrj        no need to consider it further.
1282*38fd1498Szrj     */
1283*38fd1498Szrj     {
1284*38fd1498Szrj       if (dump_file)
1285*38fd1498Szrj 	fprintf (dump_file, "it's already combined with another pt\n");
1286*38fd1498Szrj       return 0;
1287*38fd1498Szrj     }
1288*38fd1498Szrj 
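  /* Compute the target registers that are live anywhere over DEF's
     current live range; those registers are not available when a new
     home for the def is chosen below.  */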
1289*38fd1498Szrj   btr_def_live_range (def, &btrs_live_in_range);
1290*38fd1498Szrj   auto_bitmap live_range;
1291*38fd1498Szrj   bitmap_copy (live_range, def->live_range);
1292*38fd1498Szrj 
1293*38fd1498Szrj #ifdef INSN_SCHEDULING
1294*38fd1498Szrj   def_latency = insn_default_latency (def->insn) * issue_rate;
1295*38fd1498Szrj #else
1296*38fd1498Szrj   def_latency = issue_rate;
1297*38fd1498Szrj #endif
1298*38fd1498Szrj 
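  /* See whether some use in DEF's own block falls inside the def's
     latency window and cannot be scheduled out of the way; if so, the
     register is used near the def, which makes migrating the def out
     of this block more attractive.  */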
1299*38fd1498Szrj   for (user = def->uses; user != NULL; user = user->next)
1300*38fd1498Szrj     {
1301*38fd1498Szrj       if (user->bb == def->bb
1302*38fd1498Szrj 	  && user->luid > def->luid
1303*38fd1498Szrj 	  && (def->luid + def_latency) > user->luid
1304*38fd1498Szrj 	  && ! can_move_up (def->bb, def->insn,
1305*38fd1498Szrj 			    (def->luid + def_latency) - user->luid))
1306*38fd1498Szrj 	{
1307*38fd1498Szrj 	  btr_used_near_def = 1;
1308*38fd1498Szrj 	  break;
1309*38fd1498Szrj 	}
1310*38fd1498Szrj     }
1311*38fd1498Szrj 
1312*38fd1498Szrj   def_basic_block_freq = basic_block_freq (def->bb);
1313*38fd1498Szrj 
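  /* Walk up the dominator tree from DEF's block, considering each
     dominating block in turn as a new home for the def, until the def's
     cost drops below MIN_COST or no dominator remains.  */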
1314*38fd1498Szrj   for (attempt = get_immediate_dominator (CDI_DOMINATORS, def->bb);
1315*38fd1498Szrj        !give_up && attempt && attempt != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1316*38fd1498Szrj        && def->cost >= min_cost;
1317*38fd1498Szrj        attempt = get_immediate_dominator (CDI_DOMINATORS, attempt))
1318*38fd1498Szrj     {
1319*38fd1498Szrj       /* Try to move the instruction that sets the target register into
1320*38fd1498Szrj 	 basic block ATTEMPT.  */
1321*38fd1498Szrj       int try_freq = basic_block_freq (attempt);
1322*38fd1498Szrj       edge_iterator ei;
1323*38fd1498Szrj       edge e;
1324*38fd1498Szrj 
1325*38fd1498Szrj       /* If ATTEMPT has abnormal edges, skip it.  */
1326*38fd1498Szrj       FOR_EACH_EDGE (e, ei, attempt->succs)
1327*38fd1498Szrj 	if (e->flags & EDGE_COMPLEX)
1328*38fd1498Szrj 	  break;
1329*38fd1498Szrj       if (e)
1330*38fd1498Szrj 	continue;
1331*38fd1498Szrj 
1332*38fd1498Szrj       if (dump_file)
1333*38fd1498Szrj 	fprintf (dump_file, "trying block %d ...", attempt->index);
1334*38fd1498Szrj 
1335*38fd1498Szrj       if (try_freq < def_basic_block_freq
1336*38fd1498Szrj 	  || (try_freq == def_basic_block_freq && btr_used_near_def))
1337*38fd1498Szrj 	{
1338*38fd1498Szrj 	  int btr;
1339*38fd1498Szrj 	  augment_live_range (live_range, &btrs_live_in_range, def->bb, attempt,
1340*38fd1498Szrj 			      flag_btr_bb_exclusive);
1341*38fd1498Szrj 	  if (dump_file)
1342*38fd1498Szrj 	    {
1343*38fd1498Szrj 	      fprintf (dump_file, "Now btrs live in range are: ");
1344*38fd1498Szrj 	      dump_hard_reg_set (btrs_live_in_range);
1345*38fd1498Szrj 	      fprintf (dump_file, "\n");
1346*38fd1498Szrj 	    }
1347*38fd1498Szrj 	  btr = choose_btr (btrs_live_in_range);
1348*38fd1498Szrj 	  if (btr != -1)
1349*38fd1498Szrj 	    {
1350*38fd1498Szrj 	      move_btr_def (attempt, btr, def, live_range, &btrs_live_in_range);
1351*38fd1498Szrj 	      bitmap_copy (live_range, def->live_range);
1352*38fd1498Szrj 	      btr_used_near_def = 0;
1353*38fd1498Szrj 	      def_moved = 1;
1354*38fd1498Szrj 	      def_basic_block_freq = basic_block_freq (def->bb);
1355*38fd1498Szrj 	    }
1356*38fd1498Szrj 	  else
1357*38fd1498Szrj 	    {
1358*38fd1498Szrj 	      /* There are no free target registers available to move
1359*38fd1498Szrj 		 this far forward, so give up.  */
1360*38fd1498Szrj 	      give_up = 1;
1361*38fd1498Szrj 	      if (dump_file)
1362*38fd1498Szrj 		fprintf (dump_file,
1363*38fd1498Szrj 			 "giving up because there are no free target registers\n");
1364*38fd1498Szrj 	    }
1365*38fd1498Szrj 
1366*38fd1498Szrj 	}
1367*38fd1498Szrj     }
1368*38fd1498Szrj   if (!def_moved)
1369*38fd1498Szrj     {
1370*38fd1498Szrj       give_up = 1;
1371*38fd1498Szrj       if (dump_file)
1372*38fd1498Szrj 	fprintf (dump_file, "failed to move\n");
1373*38fd1498Szrj     }
1374*38fd1498Szrj 
1375*38fd1498Szrj   return !give_up;
1376*38fd1498Szrj }
1377*38fd1498Szrj 
1378*38fd1498Szrj /* Attempt to move instructions that set target registers earlier
1379*38fd1498Szrj    in the flowgraph, away from their corresponding uses.  */
1380*38fd1498Szrj static void
1381*38fd1498Szrj migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
1382*38fd1498Szrj {
1383*38fd1498Szrj   btr_heap_t all_btr_defs (LONG_MIN);
1384*38fd1498Szrj   int reg;
1385*38fd1498Szrj 
1386*38fd1498Szrj   gcc_obstack_init (&migrate_btrl_obstack);
1387*38fd1498Szrj   if (dump_file)
1388*38fd1498Szrj     {
1389*38fd1498Szrj       int i;
1390*38fd1498Szrj 
1391*38fd1498Szrj       for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
1392*38fd1498Szrj 	{
1393*38fd1498Szrj 	  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1394*38fd1498Szrj 	  fprintf (dump_file, "Basic block %d: count = ", i);
1395*38fd1498Szrj 	  bb->count.dump (dump_file);
1396*38fd1498Szrj 	  fprintf (dump_file, " loop-depth = %d idom = %d\n",
1397*38fd1498Szrj 		   bb_loop_depth (bb),
1398*38fd1498Szrj 		   get_immediate_dominator (CDI_DOMINATORS, bb)->index);
1399*38fd1498Szrj 	}
1400*38fd1498Szrj     }
1401*38fd1498Szrj 
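  /* Collect in ALL_BTRS the hard registers of BTR_CLASS that the pass
     may use: every register of the class if callee-saved registers are
     allowed, otherwise only call-used registers and registers that are
     already live somewhere in the function.  */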
1402*38fd1498Szrj   CLEAR_HARD_REG_SET (all_btrs);
1403*38fd1498Szrj   for (first_btr = -1, reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
1404*38fd1498Szrj     if (TEST_HARD_REG_BIT (reg_class_contents[(int) btr_class], reg)
1405*38fd1498Szrj 	&& (allow_callee_save || call_used_regs[reg]
1406*38fd1498Szrj 	    || df_regs_ever_live_p (reg)))
1407*38fd1498Szrj       {
1408*38fd1498Szrj 	SET_HARD_REG_BIT (all_btrs, reg);
1409*38fd1498Szrj 	last_btr = reg;
1410*38fd1498Szrj 	if (first_btr < 0)
1411*38fd1498Szrj 	  first_btr = reg;
1412*38fd1498Szrj       }
1413*38fd1498Szrj 
1414*38fd1498Szrj   btrs_live = XCNEWVEC (HARD_REG_SET, last_basic_block_for_fn (cfun));
1415*38fd1498Szrj   btrs_live_at_end = XCNEWVEC (HARD_REG_SET, last_basic_block_for_fn (cfun));
1416*38fd1498Szrj 
1417*38fd1498Szrj   build_btr_def_use_webs (&all_btr_defs);
1418*38fd1498Szrj 
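  /* Process the defs in order of decreasing cost; the heap is keyed on
     the negated cost, so the most expensive def is extracted first.
     A def for which migrate_btr_def reports possible further benefit is
     re-queued under its new (lower) cost; otherwise its live range
     bitmap is freed.  */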
1419*38fd1498Szrj   while (!all_btr_defs.empty ())
1420*38fd1498Szrj     {
1421*38fd1498Szrj       int min_cost = -all_btr_defs.min_key ();
1422*38fd1498Szrj       btr_def *def = all_btr_defs.extract_min ();
1423*38fd1498Szrj       if (migrate_btr_def (def, min_cost))
1424*38fd1498Szrj 	{
1425*38fd1498Szrj 	  all_btr_defs.insert (-def->cost, def);
1426*38fd1498Szrj 	  if (dump_file)
1427*38fd1498Szrj 	    {
1428*38fd1498Szrj 	      fprintf (dump_file,
1429*38fd1498Szrj 		"Putting insn %d back on queue with priority %d\n",
1430*38fd1498Szrj 		INSN_UID (def->insn), def->cost);
1431*38fd1498Szrj 	    }
1432*38fd1498Szrj 	}
1433*38fd1498Szrj       else
1434*38fd1498Szrj 	BITMAP_FREE (def->live_range);
1435*38fd1498Szrj     }
1436*38fd1498Szrj 
1437*38fd1498Szrj   free (btrs_live);
1438*38fd1498Szrj   free (btrs_live_at_end);
1439*38fd1498Szrj   obstack_free (&migrate_btrl_obstack, NULL);
1440*38fd1498Szrj }
1441*38fd1498Szrj 
1442*38fd1498Szrj static void
1443*38fd1498Szrj branch_target_load_optimize (bool after_prologue_epilogue_gen)
1444*38fd1498Szrj {
1445*38fd1498Szrj   enum reg_class klass
1446*38fd1498Szrj     = (enum reg_class) targetm.branch_target_register_class ();
1447*38fd1498Szrj   if (klass != NO_REGS)
1448*38fd1498Szrj     {
1449*38fd1498Szrj       /* Initialize issue_rate.  */
1450*38fd1498Szrj       if (targetm.sched.issue_rate)
1451*38fd1498Szrj 	issue_rate = targetm.sched.issue_rate ();
1452*38fd1498Szrj       else
1453*38fd1498Szrj 	issue_rate = 1;
1454*38fd1498Szrj 
1455*38fd1498Szrj       if (!after_prologue_epilogue_gen)
1456*38fd1498Szrj 	{
1457*38fd1498Szrj 	  /* Build the CFG for migrate_btr_defs.  */
1458*38fd1498Szrj #if 1
1459*38fd1498Szrj 	  /* This may or may not be needed, depending on where we
1460*38fd1498Szrj 	     run this phase.  */
1461*38fd1498Szrj 	  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
1462*38fd1498Szrj #endif
1463*38fd1498Szrj 	}
1464*38fd1498Szrj       df_analyze ();
1465*38fd1498Szrj 
1466*38fd1498Szrj 
1467*38fd1498Szrj       /* Dominator info is also needed for migrate_btr_def.  */
1468*38fd1498Szrj       calculate_dominance_info (CDI_DOMINATORS);
1469*38fd1498Szrj       migrate_btr_defs (klass,
1470*38fd1498Szrj 		       (targetm.branch_target_register_callee_saved
1471*38fd1498Szrj 			(after_prologue_epilogue_gen)));
1472*38fd1498Szrj 
1473*38fd1498Szrj       free_dominance_info (CDI_DOMINATORS);
1474*38fd1498Szrj     }
1475*38fd1498Szrj }
1476*38fd1498Szrj 
1477*38fd1498Szrj namespace {
1478*38fd1498Szrj 
1479*38fd1498Szrj const pass_data pass_data_branch_target_load_optimize1 =
1480*38fd1498Szrj {
1481*38fd1498Szrj   RTL_PASS, /* type */
1482*38fd1498Szrj   "btl1", /* name */
1483*38fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
1484*38fd1498Szrj   TV_NONE, /* tv_id */
1485*38fd1498Szrj   0, /* properties_required */
1486*38fd1498Szrj   0, /* properties_provided */
1487*38fd1498Szrj   0, /* properties_destroyed */
1488*38fd1498Szrj   0, /* todo_flags_start */
1489*38fd1498Szrj   0, /* todo_flags_finish */
1490*38fd1498Szrj };
1491*38fd1498Szrj 
1492*38fd1498Szrj class pass_branch_target_load_optimize1 : public rtl_opt_pass
1493*38fd1498Szrj {
1494*38fd1498Szrj public:
1495*38fd1498Szrj   pass_branch_target_load_optimize1 (gcc::context *ctxt)
1496*38fd1498Szrj     : rtl_opt_pass (pass_data_branch_target_load_optimize1, ctxt)
1497*38fd1498Szrj   {}
1498*38fd1498Szrj 
1499*38fd1498Szrj   /* opt_pass methods: */
1500*38fd1498Szrj   virtual bool gate (function *) { return flag_branch_target_load_optimize; }
1501*38fd1498Szrj   virtual unsigned int execute (function *)
1502*38fd1498Szrj     {
1503*38fd1498Szrj       branch_target_load_optimize (epilogue_completed);
1504*38fd1498Szrj       return 0;
1505*38fd1498Szrj     }
1506*38fd1498Szrj 
1507*38fd1498Szrj }; // class pass_branch_target_load_optimize1
1508*38fd1498Szrj 
1509*38fd1498Szrj } // anon namespace
1510*38fd1498Szrj 
1511*38fd1498Szrj rtl_opt_pass *
1512*38fd1498Szrj make_pass_branch_target_load_optimize1 (gcc::context *ctxt)
1513*38fd1498Szrj {
1514*38fd1498Szrj   return new pass_branch_target_load_optimize1 (ctxt);
1515*38fd1498Szrj }
1516*38fd1498Szrj 
1517*38fd1498Szrj 
1518*38fd1498Szrj namespace {
1519*38fd1498Szrj 
1520*38fd1498Szrj const pass_data pass_data_branch_target_load_optimize2 =
1521*38fd1498Szrj {
1522*38fd1498Szrj   RTL_PASS, /* type */
1523*38fd1498Szrj   "btl2", /* name */
1524*38fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
1525*38fd1498Szrj   TV_NONE, /* tv_id */
1526*38fd1498Szrj   0, /* properties_required */
1527*38fd1498Szrj   0, /* properties_provided */
1528*38fd1498Szrj   0, /* properties_destroyed */
1529*38fd1498Szrj   0, /* todo_flags_start */
1530*38fd1498Szrj   0, /* todo_flags_finish */
1531*38fd1498Szrj };
1532*38fd1498Szrj 
1533*38fd1498Szrj class pass_branch_target_load_optimize2 : public rtl_opt_pass
1534*38fd1498Szrj {
1535*38fd1498Szrj public:
1536*38fd1498Szrj   pass_branch_target_load_optimize2 (gcc::context *ctxt)
1537*38fd1498Szrj     : rtl_opt_pass (pass_data_branch_target_load_optimize2, ctxt)
1538*38fd1498Szrj   {}
1539*38fd1498Szrj 
1540*38fd1498Szrj   /* opt_pass methods: */
1541*38fd1498Szrj   virtual bool gate (function *)
1542*38fd1498Szrj     {
1543*38fd1498Szrj       return (optimize > 0 && flag_branch_target_load_optimize2);
1544*38fd1498Szrj     }
1545*38fd1498Szrj 
1546*38fd1498Szrj   virtual unsigned int execute (function *);
1547*38fd1498Szrj 
1548*38fd1498Szrj }; // class pass_branch_target_load_optimize2
1549*38fd1498Szrj 
1550*38fd1498Szrj unsigned int
1551*38fd1498Szrj pass_branch_target_load_optimize2::execute (function *)
1552*38fd1498Szrj {
1553*38fd1498Szrj   static int warned = 0;
1554*38fd1498Szrj 
1555*38fd1498Szrj   /* Leave this a warning for now so that it is possible to experiment
1556*38fd1498Szrj      with running this pass twice.  In 3.6, we should either make this
1557*38fd1498Szrj      an error, or use separate dump files.  */
1558*38fd1498Szrj   if (flag_branch_target_load_optimize
1559*38fd1498Szrj       && flag_branch_target_load_optimize2
1560*38fd1498Szrj       && !warned)
1561*38fd1498Szrj     {
1562*38fd1498Szrj       warning (0, "branch target register load optimization is not intended "
1563*38fd1498Szrj 	       "to be run twice");
1564*38fd1498Szrj 
1565*38fd1498Szrj       warned = 1;
1566*38fd1498Szrj     }
1567*38fd1498Szrj 
1568*38fd1498Szrj   branch_target_load_optimize (epilogue_completed);
1569*38fd1498Szrj   return 0;
1570*38fd1498Szrj }
1571*38fd1498Szrj 
1572*38fd1498Szrj } // anon namespace
1573*38fd1498Szrj 
1574*38fd1498Szrj rtl_opt_pass *
1575*38fd1498Szrj make_pass_branch_target_load_optimize2 (gcc::context *ctxt)
1576*38fd1498Szrj {
1577*38fd1498Szrj   return new pass_branch_target_load_optimize2 (ctxt);
1578*38fd1498Szrj }
1579