/* CPU mode switching
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008,
   2009, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "tm_p.h"
#include "function.h"
#include "tree-pass.h"
#include "timevar.h"
#include "df.h"
#include "emit-rtl.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"

#ifdef OPTIMIZE_MODE_SWITCHING

/* The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a list
   for each basic block.  For each entity, there is an array of bb_info over
   the flow graph basic blocks (local var 'bb_info'); each element contains a
   list of all insns within that basic block, in the order they are
   encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry, without a mode.  (Each basic block
   in the flow graph must have at least one entry in the segment table.)

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, till for each entity all modes are exhausted.

   More details are located in the code for optimize_mode_switching().  */
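
/* A rough illustration of the target interface that drives this file (all
   the macro names below appear elsewhere in this file; the helper functions
   and concrete values are made up for the example).  A target with a single
   entity 0 that has N real modes, using N itself to mean "no particular
   mode required", might provide something along these lines:

     #define OPTIMIZE_MODE_SWITCHING(ENTITY)   (1)
     #define NUM_MODES_FOR_MODE_SWITCHING      { N }
     #define MODE_NEEDED(ENTITY, INSN)         example_mode_needed (INSN)
     #define MODE_PRIORITY_TO_MODE(ENTITY, P)  (P)
     #define EMIT_MODE_SET(ENTITY, MODE, HARD_REGS_LIVE) \
       example_emit_mode_set (MODE)

   MODE_AFTER, MODE_ENTRY and MODE_EXIT are optional refinements used below;
   see the target macro documentation for the authoritative interface.  */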

/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.
   REGS_LIVE is the set of hard registers live at this point, for use by
   EMIT_MODE_SET.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};

/* For each basic block, one of these per entity: the head of the list of
   seginfo segments for that block, and the mode the block ends up
   computing (setting) for the entity.  */
struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};
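
/* Illustration only: if, for some entity, a block contains one insn that
   needs mode 2 followed by one that needs mode 1, the scan in
   optimize_mode_switching builds a two-element seginfo list for that block
   (mode 2 at the first insn, mode 1 at the second) and records 1 as the
   mode the block computes.  A block with no requirement at all gets a
   single entry with no mode.  */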

/* These bitmaps are used for the LCM algorithm.  */

static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;

static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET *);
static void reg_becomes_live (rtx, const_rtx, void *);
static void make_preds_opaque (basic_block, int);


/* Allocate a new seginfo structure, initialized with the MODE, INSN,
   basic block BB and REGS_LIVE parameters.  */

static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;
  ptr = XNEW (struct seginfo);
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}

/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
        ptr = ptr->next;
      ptr->next = info;
    }
}

/* Make all predecessors of basic block B opaque, recursively, till we hit
   some that are already non-transparent, or an edge where aux is set; that
   denotes that a mode set is to be done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity that
   we are currently handling mode-switching for.  */

static void
make_preds_opaque (basic_block b, int j)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, b->preds)
    {
      basic_block pb = e->src;

      if (e->aux || ! TEST_BIT (transp[pb->index], j))
        continue;

      RESET_BIT (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}

/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET *live)
{
  int regno;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    remove_from_hard_reg_set (live, GET_MODE (reg), regno);
}

/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, const_rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set ((HARD_REG_SET *) live, GET_MODE (reg), regno);
}

/* Make sure that if MODE_ENTRY is defined, MODE_EXIT is defined too,
   and vice versa.  */
#if defined (MODE_ENTRY) != defined (MODE_EXIT)
#error "Both MODE_ENTRY and MODE_EXIT must be defined"
#endif
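
/* Illustration only: a target that must be in a known mode around function
   boundaries would typically define both hooks in terms of the same
   "normal" mode, e.g.

     #define MODE_ENTRY(ENTITY)  NORMAL_MODE (ENTITY)
     #define MODE_EXIT(ENTITY)   NORMAL_MODE (ENTITY)

   where NORMAL_MODE is whatever mode the ABI guarantees on entry to and
   requires on exit from a function; the exact definitions here are
   hypothetical.  */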

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
/* Split the fallthrough edge to the exit block, so that we can note
   that NORMAL_MODE is required there.  Return the new block if it's
   inserted before the exit block.  Otherwise return null.  */

static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
        basic_block src_bb = eg->src;
        rtx last_insn, ret_reg;

        gcc_assert (!pre_exit);
        /* If this function returns a value at the end, we have to
           insert the final mode switch before the return value copy
           to its hard register.  */
        if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
            && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
            && GET_CODE (PATTERN (last_insn)) == USE
            && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
          {
            int ret_start = REGNO (ret_reg);
            int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
            int ret_end = ret_start + nregs;
            int short_block = 0;
            int maybe_builtin_apply = 0;
            int forced_late_switch = 0;
            rtx before_return_copy;

            do
              {
                rtx return_copy = PREV_INSN (last_insn);
                rtx return_copy_pat, copy_reg;
                int copy_start, copy_num;
                int j;

                if (INSN_P (return_copy))
                  {
                    /* When using SJLJ exceptions, the call to the
                       unregister function is inserted between the
                       clobber of the return value and the copy.
                       We do not want to split the block before this
                       or any other call; if we have not found the
                       copy yet, the copy must have been deleted.  */
                    if (CALL_P (return_copy))
                      {
                        short_block = 1;
                        break;
                      }
                    return_copy_pat = PATTERN (return_copy);
                    switch (GET_CODE (return_copy_pat))
                      {
                      case USE:
                        /* Skip __builtin_apply pattern.  */
                        if (GET_CODE (XEXP (return_copy_pat, 0)) == REG
                            && (targetm.calls.function_value_regno_p
                                (REGNO (XEXP (return_copy_pat, 0)))))
                          {
                            maybe_builtin_apply = 1;
                            last_insn = return_copy;
                            continue;
                          }
                        break;

                      case ASM_OPERANDS:
                        /* Skip barrier insns.  */
                        if (!MEM_VOLATILE_P (return_copy_pat))
                          break;

                        /* Fall through.  */

                      case ASM_INPUT:
                      case UNSPEC_VOLATILE:
                        last_insn = return_copy;
                        continue;

                      default:
                        break;
                      }

                    /* If the return register is not (in its entirety)
                       likely spilled, the return copy might be
                       partially or completely optimized away.  */
                    return_copy_pat = single_set (return_copy);
                    if (!return_copy_pat)
                      {
                        return_copy_pat = PATTERN (return_copy);
                        if (GET_CODE (return_copy_pat) != CLOBBER)
                          break;
                        else if (!optimize)
                          {
                            /* This might be (clobber (reg [<result>]))
                               when not optimizing.  Then check if
                               the previous insn is the clobber for
                               the return register.  */
                            copy_reg = SET_DEST (return_copy_pat);
                            if (GET_CODE (copy_reg) == REG
                                && !HARD_REGISTER_NUM_P (REGNO (copy_reg)))
                              {
                                if (INSN_P (PREV_INSN (return_copy)))
                                  {
                                    return_copy = PREV_INSN (return_copy);
                                    return_copy_pat = PATTERN (return_copy);
                                    if (GET_CODE (return_copy_pat) != CLOBBER)
                                      break;
                                  }
                              }
                          }
                      }
                    copy_reg = SET_DEST (return_copy_pat);
                    if (GET_CODE (copy_reg) == REG)
                      copy_start = REGNO (copy_reg);
                    else if (GET_CODE (copy_reg) == SUBREG
                             && GET_CODE (SUBREG_REG (copy_reg)) == REG)
                      copy_start = REGNO (SUBREG_REG (copy_reg));
                    else
                      break;
                    if (copy_start >= FIRST_PSEUDO_REGISTER)
                      break;
                    copy_num
                      = hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

                    /* If the return register is not likely spilled, as is
                       the case for floating point on SH4, then it might
                       be set by an arithmetic operation that needs a
                       different mode than the exit block.  */
                    for (j = n_entities - 1; j >= 0; j--)
                      {
                        int e = entity_map[j];
                        int mode = MODE_NEEDED (e, return_copy);

                        if (mode != num_modes[e] && mode != MODE_EXIT (e))
                          break;
                      }
                    if (j >= 0)
                      {
                        /* For the SH4, floating point loads depend on fpscr,
                           thus we might need to put the final mode switch
                           after the return value copy.  That is still OK,
                           because a floating point return value does not
                           conflict with address reloads.  */
                        if (copy_start >= ret_start
                            && copy_start + copy_num <= ret_end
                            && OBJECT_P (SET_SRC (return_copy_pat)))
                          forced_late_switch = 1;
                        break;
                      }

                    if (copy_start >= ret_start
                        && copy_start + copy_num <= ret_end)
                      nregs -= copy_num;
                    else if (!maybe_builtin_apply
                             || !targetm.calls.function_value_regno_p
                                  (copy_start))
                      break;
                    last_insn = return_copy;
                  }
                /* ??? Exception handling can lead to the return value
                   copy being already separated from the return value use,
                   as in unwind-dw2.c.
                   Similarly, conditionally returning without a value,
                   and conditionally using builtin_return can lead to an
                   isolated use.  */
                if (return_copy == BB_HEAD (src_bb))
                  {
                    short_block = 1;
                    break;
                  }
                last_insn = return_copy;
              }
            while (nregs);

            /* If we didn't see a full return value copy, verify that there
               is a plausible reason for this.  If some, but not all of the
               return register is likely spilled, we can expect that there
               is a copy for the likely spilled part.  */
            gcc_assert (!nregs
                        || forced_late_switch
                        || short_block
                        || !(targetm.class_likely_spilled_p
                             (REGNO_REG_CLASS (ret_start)))
                        || (nregs
                            != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
                        /* For multi-hard-register floating point
                           values, sometimes the likely-spilled part
                           is ordinarily copied first, then the other
                           part is set with an arithmetic operation.
                           This doesn't actually cause reload
                           failures, so let it pass.  */
                        || (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
                            && nregs != 1));

            if (INSN_P (last_insn))
              {
                before_return_copy
                  = emit_note_before (NOTE_INSN_DELETED, last_insn);
                /* Instructions preceding LAST_INSN in the same block might
                   require a different mode than MODE_EXIT, so if we might
                   have such instructions, keep them in a separate block
                   from pre_exit.  */
                if (last_insn != BB_HEAD (src_bb))
                  src_bb = split_block (src_bb,
                                        PREV_INSN (before_return_copy))->dest;
              }
            else
              before_return_copy = last_insn;
            pre_exit = split_block (src_bb, before_return_copy)->src;
          }
        else
          {
            pre_exit = split_edge (eg);
          }
      }

  return pre_exit;
}
#endif

/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

static int
optimize_mode_switching (void)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emited ATTRIBUTE_UNUSED = false;
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;

  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
        int entry_exit_extra = 0;

        /* Create the list of segments within each basic block.
           If NORMAL_MODE is defined, allow for two extra
           blocks split from the entry and exit block.  */
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
        entry_exit_extra = 3;
#endif
        bb_info[n_entities]
          = XCNEWVEC (struct bb_info, last_basic_block + entry_exit_extra);
        entity_map[n_entities++] = e;
        if (num_modes[e] > max_num_modes)
          max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  /* Split the edge from the entry block, so that we can note that
     NORMAL_MODE is supplied there.  */
  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
#endif

  df_analyze ();

  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

  sbitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine what mode the first use (if any) of entity E needs;
         this will be the mode that is anticipatable for this block.
         Also compute the initial transparency settings.  */
      FOR_EACH_BB (bb)
        {
          struct seginfo *ptr;
          int last_mode = no_mode;
          bool any_set_required = false;
          HARD_REG_SET live_now;

          REG_SET_TO_HARD_REG_SET (live_now, df_get_live_in (bb));

          /* Pretend the mode is clobbered across abnormal edges.  */
          {
            edge_iterator ei;
            edge e;
            FOR_EACH_EDGE (e, ei, bb->preds)
              if (e->flags & EDGE_COMPLEX)
                break;
            if (e)
              {
                ptr = new_seginfo (no_mode, BB_HEAD (bb), bb->index, live_now);
                add_seginfo (info + bb->index, ptr);
                RESET_BIT (transp[bb->index], j);
              }
          }

          FOR_BB_INSNS (bb, insn)
            {
              if (INSN_P (insn))
                {
                  int mode = MODE_NEEDED (e, insn);
                  rtx link;

                  if (mode != no_mode && mode != last_mode)
                    {
                      any_set_required = true;
                      last_mode = mode;
                      ptr = new_seginfo (mode, insn, bb->index, live_now);
                      add_seginfo (info + bb->index, ptr);
                      RESET_BIT (transp[bb->index], j);
                    }
#ifdef MODE_AFTER
                  last_mode = MODE_AFTER (last_mode, insn);
#endif
                  /* Update LIVE_NOW.  */
                  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                    if (REG_NOTE_KIND (link) == REG_DEAD)
                      reg_dies (XEXP (link, 0), &live_now);

                  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
                  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
                    if (REG_NOTE_KIND (link) == REG_UNUSED)
                      reg_dies (XEXP (link, 0), &live_now);
                }
            }

          info[bb->index].computing = last_mode;
          /* Check for blocks without ANY mode requirements.
             N.B. because of MODE_AFTER, last_mode might still be different
             from no_mode.  */
          if (!any_set_required)
            {
              ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
              add_seginfo (info + bb->index, ptr);
            }
        }
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
        int mode = MODE_ENTRY (e);

        if (mode != no_mode)
          {
            bb = post_entry;

            /* By always making this nontransparent, we save
               an extra check in make_preds_opaque.  We also
               need this to avoid confusing pre_edge_lcm when
               antic is cleared but transp and comp are set.  */
            RESET_BIT (transp[bb->index], j);

            /* Insert a fake computing definition of MODE into entry
               blocks which compute no mode.  This represents the mode on
               entry.  */
            info[bb->index].computing = mode;

            if (pre_exit)
              info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
          }
      }
#endif /* NORMAL_MODE */
    }

  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *del;
      sbitmap *insert;

      /* Set the anticipatable and computing arrays.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
        {
          int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
          struct bb_info *info = bb_info[j];

          FOR_EACH_BB (bb)
            {
              if (info[bb->index].seginfo->mode == m)
                SET_BIT (antic[bb->index], j);

              if (info[bb->index].computing == m)
                SET_BIT (comp[bb->index], j);
            }
        }

      /* Calculate the optimal locations for placing
         mode switches to modes with priority I.  */

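      /* Everything that is not transparent for an entity kills an
         anticipated mode change in that block; from TRANSP, COMP, ANTIC
         and KILL, pre_edge_lcm computes the entities whose mode set should
         be inserted on each edge (INSERT) and the per-block mode
         computations that have become redundant (DEL).  */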
      FOR_EACH_BB (bb)
        sbitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
                                kill, &insert, &del);

      for (j = n_entities - 1; j >= 0; j--)
        {
          /* Insert all mode sets that have been inserted by lcm.  */
          int no_mode = num_modes[entity_map[j]];

          /* Wherever we have moved a mode setting upwards in the flow graph,
             the blocks between the new setting site and the now redundant
             computation cease to be transparent for any lower-priority
             mode of the same entity.  First set the aux field of each
             insertion site edge non-transparent, then propagate the new
             non-transparency from the redundant computation upwards till
             we hit an insertion site or an already non-transparent block.  */
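          /* For instance (illustration only): if LCM moves a mode set from
             block B up to an edge several blocks earlier, the blocks on the
             paths between that edge and B now have that mode established
             across them, so they must not be treated as transparent for the
             lower-priority modes handled in later iterations;
             make_preds_opaque clears their transparency bits, stopping at
             edges whose aux field marks an insertion site.  */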
          for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
            {
              edge eg = INDEX_EDGE (edge_list, e);
              int mode;
              basic_block src_bb;
              HARD_REG_SET live_at_edge;
              rtx mode_set;

              eg->aux = 0;

              if (! TEST_BIT (insert[e], j))
                continue;

              eg->aux = (void *)1;

              mode = current_mode[j];
              src_bb = eg->src;

              REG_SET_TO_HARD_REG_SET (live_at_edge, df_get_live_out (src_bb));

              start_sequence ();
              EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
              mode_set = get_insns ();
              end_sequence ();

              /* Do not bother to insert an empty sequence.  */
              if (mode_set == NULL_RTX)
                continue;

              /* We should not get an abnormal edge here.  */
              gcc_assert (! (eg->flags & EDGE_ABNORMAL));

              need_commit = 1;
              insert_insn_on_edge (mode_set, eg);
            }

          FOR_EACH_BB_REVERSE (bb)
            if (TEST_BIT (del[bb->index], j))
              {
                make_preds_opaque (bb, j);
                /* Cancel the 'deleted' mode set.  */
                bb_info[j][bb->index].seginfo->mode = no_mode;
              }
        }

      sbitmap_vector_free (del);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
        {
          struct seginfo *ptr, *next;
          for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
            {
              next = ptr->next;
              if (ptr->mode != no_mode)
                {
                  rtx mode_set;

                  start_sequence ();
                  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
                  mode_set = get_insns ();
                  end_sequence ();

                  /* Insert MODE_SET only if it is nonempty.  */
                  if (mode_set != NULL_RTX)
                    {
                      emited = true;
                      if (NOTE_INSN_BASIC_BLOCK_P (ptr->insn_ptr))
                        emit_insn_after (mode_set, ptr->insn_ptr);
                      else
                        emit_insn_before (mode_set, ptr->insn_ptr);
                    }
                }

              free (ptr);
            }
        }

      free (bb_info[j]);
    }

  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emited)
    return 0;
#endif

  return 1;
}

#endif /* OPTIMIZE_MODE_SWITCHING */

static bool
gate_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  return true;
#else
  return false;
#endif
}

static unsigned int
rest_of_handle_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  optimize_mode_switching ();
#endif /* OPTIMIZE_MODE_SWITCHING */
  return 0;
}


struct rtl_opt_pass pass_mode_switching =
{
 {
  RTL_PASS,
  "mode_sw",                            /* name */
  gate_mode_switching,                  /* gate */
  rest_of_handle_mode_switching,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MODE_SWITCH,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  0                                     /* todo_flags_finish */
 }
};