/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "tm_p.h"
#include "flags.h"
#include "basic-block.h"
#include "real.h"

/* Forward declarations */
static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
static void set_of_1		PARAMS ((rtx, rtx, void *));
static void insn_dependent_p_1	PARAMS ((rtx, rtx, void *));
static int computed_jump_p_1	PARAMS ((rtx));
static void parms_set		PARAMS ((rtx, rtx, void *));
static bool hoist_test_store		PARAMS ((rtx, rtx, regset));
static void hoist_update_store		PARAMS ((rtx, rtx *, rtx, rtx));

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (x)
     rtx x;
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));

    case QUEUED:
      return 1;

    case ADDRESSOF:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	  || RTX_UNCHANGING_P (x))
	return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
	 that must happen after a call.  This currently screws up local-alloc
	 into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
	return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;

      /* FALLTHROUGH */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (rtx_unstable_p (XEXP (x, i)))
	  return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_unstable_p (XVECEXP (x, i, j)))
	    return 1;
      }

  return 0;
}

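/* Usage sketch (editor's illustration, not part of the original file):
   rtx_unstable_p is typically applied to the address of a MEM when
   deciding whether a loaded value may safely be reused later, e.g.

     if (GET_CODE (x) == MEM && ! rtx_unstable_p (XEXP (x, 0)))
       ... the address is stable within this function ...

   Note that the MEM itself must also be marked unchanging for the
   loaded value, as opposed to the address, to be stable; see the MEM
   case above.  */
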
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (x, for_alias)
     rtx x;
     int for_alias;
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case QUEUED:
      return 1;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
	 and arg pointers and not just the register number in case we have
	 eliminated the frame and/or arg pointer and are using it
	 for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
	return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
	  /* ??? When call-clobbered, the value is stable modulo the restore
	     that must happen after a call.  This currently screws up
	     local-alloc into believing that the restore is not needed, so we
	     must return 0 only if we are called from alias analysis.  */
	  && for_alias
#endif
	  )
	return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
	 (in fact it is related specifically to operand 1)
	 during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
	     || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;

      /* FALLTHROUGH */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (rtx_varies_p (XEXP (x, i), for_alias))
	  return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
	    return 1;
      }

  return 0;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  || x == stack_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
	return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    case PLUS:
      /* An address is assumed not to trap if it is an address that can't
	 trap plus a constant integer or it is the pic register plus a
	 constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
		 && GET_CODE (XEXP (x, 1)) == CONST_INT)
		|| (XEXP (x, 0) == pic_offset_table_rtx
		    && CONSTANT_P (XEXP (x, 1))));

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 1));

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

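/* Usage sketch (editor's illustration, not part of the original file):
   passes that speculate memory loads, such as if-conversion, may only
   do so when the address is known not to fault:

     if (GET_CODE (x) == MEM && ! rtx_addr_can_trap_p (XEXP (x, 0)))
       ... the load may be executed unconditionally ...

   Stack- and frame-relative addresses and non-weak symbols fall into
   the safe category; an arbitrary pseudo register does not.  */
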
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (x, for_alias)
     rtx x;
     int for_alias;
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (rtx_addr_varies_p (XEXP (x, i), for_alias))
	  return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
	    return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (x)
     rtx x;
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (x)
     rtx x;
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}

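/* Editor's note (illustration only): get_integer_term and
   get_related_value decompose an address of the form
   (const (plus SYM N)) into its two halves, so that for

     x = gen_rtx_CONST (Pmode,
			gen_rtx_PLUS (Pmode, sym, GEN_INT (12)));

   get_integer_term (x) yields 12 and get_related_value (x) yields SYM.
   cse.c pairs the two through the `related_value' field to relate
   constants that differ only by an offset.  */
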
/* Given a tablejump insn INSN, return the RTL expression for the offset
   into the jump table.  If the offset cannot be determined, then return
   NULL_RTX.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the offset was found.  */

rtx
get_jump_table_offset (insn, earliest)
     rtx insn;
     rtx *earliest;
{
  rtx label;
  rtx table;
  rtx set;
  rtx old_insn;
  rtx x;
  rtx old_x;
  rtx y;
  rtx old_y;
  int i;

  if (GET_CODE (insn) != JUMP_INSN
      || ! (label = JUMP_LABEL (insn))
      || ! (table = NEXT_INSN (label))
      || GET_CODE (table) != JUMP_INSN
      || (GET_CODE (PATTERN (table)) != ADDR_VEC
	  && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
      || ! (set = single_set (insn)))
    return NULL_RTX;

  x = SET_SRC (set);

  /* Some targets (e.g., ARM) emit a tablejump that also
     contains the out-of-range target.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    x = XEXP (x, 1);

  /* Search backwards and locate the expression stored in X.  */
  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
    ;

  /* If X is an expression using a relative address then strip
     off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
     or the jump table label.  */
  if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
    {
      for (i = 0; i < 2; i++)
	{
	  old_insn = insn;
	  y = XEXP (x, i);

	  if (y == pc_rtx || y == pic_offset_table_rtx)
	    break;

	  for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
	       old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
	    ;

	  if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
	    break;
	}

      if (i >= 2)
	return NULL_RTX;

      x = XEXP (x, 1 - i);

      for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
	;
    }

  /* Strip off any sign or zero extension.  */
  if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
    {
      x = XEXP (x, 0);

      for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
	;
    }

  /* If X isn't a MEM then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != MEM)
    return NULL_RTX;

  /* Strip off the MEM.  */
  x = XEXP (x, 0);

  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
    ;

  /* If X isn't a PLUS then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != PLUS)
    return NULL_RTX;

  /* At this point we should have an expression representing the jump table
     plus an offset.  Examine each operand in order to determine which one
     represents the jump table.  Knowing that tells us that the other operand
     must represent the offset.  */
  for (i = 0; i < 2; i++)
    {
      old_insn = insn;
      y = XEXP (x, i);

      for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
	   old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
	;

      if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
	  && reg_mentioned_p (label, y))
	break;
    }

  if (i >= 2)
    return NULL_RTX;

  x = XEXP (x, 1 - i);

  /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM.  */
  if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
    for (i = 0; i < 2; i++)
      if (XEXP (x, i) == pic_offset_table_rtx)
	{
	  x = XEXP (x, 1 - i);
	  break;
	}

  if (earliest)
    *earliest = insn;

  /* Return the RTL expression representing the offset.  */
  return x;
}

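/* Editor's note (illustration only): a typical tablejump dispatch
   computes something of the shape

     (set (pc) (mem (plus (reg INDEX) (label_ref TABLE_LABEL))))

   possibly with PIC, PC-relative or extension wrappers around it, and
   get_jump_table_offset peels those wrappers away, returning the
   (reg INDEX) part, i.e. the offset into the table.  */
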
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (loc, data)
     rtx *loc;
     void *data ATTRIBUTE_UNUSED;
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) == REG)
	{
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return 1;
	  return 0;
	}
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}

/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (x)
     rtx x;
{

  if (INSN_P (x))
    {
      if (GET_CODE (x) == CALL_INSN)
	{
	  if (! CONST_OR_PURE_CALL_P (x))
	    return 1;
	  x = CALL_INSN_FUNCTION_USAGE (x);
	  if (x == 0)
	    return 0;
	}
      else
	x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}

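/* Editor's note (illustration only): global_reg_mentioned_p_1 is a
   for_each_rtx callback, so the same test can be applied to any
   sub-rtx by hand, e.g.

     if (for_each_rtx (&XEXP (pat, 0), global_reg_mentioned_p_1, NULL))
       ... operand 0 mentions a register declared `global' ...

   For whole insns, global_reg_mentioned_p above already handles the
   CALL_INSN special cases and should be preferred.  */
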
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (x, find, count_dest)
     rtx x, find;
     int count_dest;
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
	return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
	return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  count += count_occurrences (XEXP (x, i), find, count_dest);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
	  break;
	}
    }
  return count;
}

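/* Usage sketch (editor's illustration, not part of the original file):

     n = count_occurrences (PATTERN (insn), reg, 0);

   counts how many times REG is used inside INSN's pattern while
   ignoring places where it merely appears as the destination of a SET;
   pass 1 as the last argument to count those as well.  */
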
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (reg, in)
     rtx reg, in;
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
	 and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
      return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);

    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e'
	       && reg_mentioned_p (reg, XEXP (in, i)))
	return 1;
    }
  return 0;
}

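/* Usage sketch (editor's illustration, not part of the original file):

     if (reg_mentioned_p (reg, PATTERN (insn)))
       ... INSN reads or writes REG somewhere in its body ...

   Registers are compared only by number here; overlapping multi-word
   hard registers with different numbers are not detected, for which
   reg_overlap_mentioned_p is the appropriate predicate.  */
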
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (beg, end)
     rtx beg, end;
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == CODE_LABEL)
      return 0;
  return 1;
}

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no JUMP_INSN insn.  */

int
no_jumps_between_p (beg, end)
     rtx beg, end;
{
  rtx p;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == JUMP_INSN)
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
	   || (GET_CODE (insn) == CALL_INSN
	      && (find_reg_fusage (insn, USE, reg)
		  || find_reg_fusage (insn, CLOBBER, reg)))))
      return 1;
  return 0;
}

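/* Editor's note (illustration only): the *_between_p predicates walk
   the insn chain exclusively, so a typical safety check before moving
   an insn looks like

     if (! reg_used_between_p (dest, from_insn, to_insn)
	 && ! reg_set_between_p (dest, from_insn, to_insn))
       ... DEST is neither read nor written strictly between the two ...

   Both endpoints themselves are deliberately not examined.  */
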
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (x, body)
     rtx x;
     rtx body;
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
	return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn references X if
	 it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
	  && GET_CODE (SET_DEST (body)) != PC
	  && GET_CODE (SET_DEST (body)) != REG
	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	  && reg_overlap_mentioned_p (x, SET_DEST (body)))
	return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
	  return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
	  return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (reg_referenced_p (x, XVECEXP (body, 0, i)))
	  return 1;
      return 0;

    case CLOBBER:
      if (GET_CODE (XEXP (body, 0)) == MEM)
	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
	  return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
	return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is referenced in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
   not count.  */

int
reg_referenced_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (reg_referenced_p (reg, PATTERN (insn))
	   || (GET_CODE (insn) == CALL_INSN
	      && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (reg, insn)
     rtx reg, insn;
{
  rtx body = insn;

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn))
    {
      if (FIND_REG_INC_NOTE (insn, reg)
	  || (GET_CODE (insn) == CALL_INSN
	      /* We'd like to test call_used_regs here, but rtlanal.c can't
		 reference that variable due to its use in genattrtab.  So
		 we'll just be more conservative.

		 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
		 information holds all clobbered registers.  */
	      && ((GET_CODE (reg) == REG
		   && REGNO (reg) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (reg) == MEM
		  || find_reg_fusage (insn, CLOBBER, reg))))
	return 1;

      body = PATTERN (insn);
    }

  return set_of (reg, insn) != NULL_RTX;
}

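/* Editor's note (illustration only): reg_set_p answers a yes/no
   question, while set_of (defined below, via note_stores) returns the
   actual SET or CLOBBER rtx, e.g.

     rtx set = set_of (reg, insn);
     if (set != NULL_RTX && GET_CODE (set) == SET)
       ... SET_SRC (set) is the value stored by the modifying SET ...

   For CALL_INSNs reg_set_p is deliberately conservative about
   call-clobbered hard registers, as the comment above explains.  */
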
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Do not
   consider non-registers one way or the other.  */

int
regs_set_between_p (x, start, end)
     rtx x;
     rtx start, end;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
	return 1;

      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (regs_set_between_p (XVECEXP (x, i, j), start, end))
	    return 1;
    }

  return 0;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does not perform any memory aliasing.  */

int
modified_between_p (x, start, end)
     rtx x;
     rtx start, end;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      /* If the memory is not constant, assume it is modified.  If it is
	 constant, we still have to check the address.  */
      if (! RTX_UNCHANGING_P (x))
	return 1;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
	return 1;

      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (modified_between_p (XVECEXP (x, i, j), start, end))
	    return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does not perform any memory aliasing.  */

int
modified_in_p (x, insn)
     rtx x;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      /* If the memory is not constant, assume it is modified.  If it is
	 constant, we still have to check the address.  */
      if (! RTX_UNCHANGING_P (x))
	return 1;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
	return 1;

      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (modified_in_p (XVECEXP (x, i, j), insn))
	    return 1;
    }

  return 0;
}

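/* Usage sketch (editor's illustration, not part of the original file):
   before substituting an expression X from one insn into another, a
   pass typically verifies that nothing X depends on changes in between:

     if (! modified_between_p (x, def_insn, use_insn))
       ... X still has the same value at USE_INSN ...

   Any MEM that is not RTX_UNCHANGING_P makes these predicates return 1,
   since no memory alias analysis is done here.  */
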
/* Return true if anything in insn X is (anti,output,true) dependent on
   anything in insn Y.  */

int
insn_dependent_p (x, y)
     rtx x, y;
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    abort ();

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  return 0;
}

/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (x, pat, data)
     rtx x;
     rtx pat ATTRIBUTE_UNUSED;
     void *data;
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (x, pat, data1)
     rtx x;
     rtx pat;
     void *data1;
{
   struct set_of_data *data = (struct set_of_data *) (data1);
   if (rtx_equal_p (x, data->pat)
       || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
     data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (pat, insn)
     rtx pat, insn;
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

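/* Editor's note (illustration only): set_of is built on note_stores,
   which invokes the callback once per SET or CLOBBER in the pattern.
   An equivalent open-coded use would be

     struct set_of_data data;
     data.found = NULL_RTX;
     data.pat = reg;
     note_stores (PATTERN (insn), set_of_1, &data);

   after which data.found is the SET or CLOBBER that modifies REG,
   if any.  */
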
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (insn, pat)
     rtx insn, pat;
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx sub = XVECEXP (pat, 0, i);
	  switch (GET_CODE (sub))
	    {
	    case USE:
	    case CLOBBER:
	      break;

	    case SET:
	      /* We can consider insns having multiple sets, where all
		 but one are dead, as single set insns.  In the common case
		 only a single set is present in the pattern, so we want
		 to avoid checking for REG_UNUSED notes unless necessary.

		 When we reach a set the first time, we just expect it to be
		 the single set we are looking for; only when more sets
		 are found in the insn do we check them.  */
	      if (!set_verified)
		{
		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
		      && !side_effects_p (set))
		    set = NULL;
		  else
		    set_verified = 1;
		}
	      if (!set)
		set = sub, set_verified = 0;
	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
		       || side_effects_p (sub))
		return NULL_RTX;
	      break;

	    default:
	      return NULL_RTX;
	    }
	}
    }
  return set;
}

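/* Editor's note (illustration only): single_set_2 is the out-of-line
   part of the single_set interface declared in rtl.h; callers normally
   write

     rtx set = single_set (insn);
     if (set)
       ... SET_SRC (set) / SET_DEST (set) describe the whole insn ...

   and only PARALLEL patterns with dead extra sets end up here.  */
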
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (insn)
     rtx insn;
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
	  {
	    /* If we have already found a SET, then return now.  */
	    if (found)
	      return 1;
	    else
	      found = 1;
	  }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (set)
     rtx set;
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (side_effects_p (src) || side_effects_p (dst))
    return 0;

  if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
    return rtx_equal_p (dst, src);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (GET_CODE (dst) == SIGN_EXTRACT
      || GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
	   && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
	return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (GET_CODE (src) == REG && GET_CODE (dst) == REG
	  && REGNO (src) == REGNO (dst));
}

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
	 this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx tem = XVECEXP (pat, 0, i);

	  if (GET_CODE (tem) == USE
	      || GET_CODE (tem) == CLOBBER)
	    continue;

	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
	    return 0;
	}

      return 1;
    }
  return 0;
}


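/* Usage sketch (editor's illustration, not part of the original file):
   passes that remove dead code use noop_move_p to recognize moves of a
   register to itself, roughly

     if (INSN_P (insn) && noop_move_p (insn))
       delete_insn (insn);

   where delete_insn is the insn-chain deletion routine declared in
   rtl.h.  Insns carrying REG_EQUAL or REG_RETVAL notes are deliberately
   kept, as the checks above show.  */
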
1306*c87b03e5Sespie /* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
1307*c87b03e5Sespie    is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1308*c87b03e5Sespie    If the object was modified, if we hit a partial assignment to X, or hit a
1309*c87b03e5Sespie    CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
1310*c87b03e5Sespie    point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
1311*c87b03e5Sespie    be the src.  */
1312*c87b03e5Sespie 
1313*c87b03e5Sespie rtx
1314*c87b03e5Sespie find_last_value (x, pinsn, valid_to, allow_hwreg)
1315*c87b03e5Sespie      rtx x;
1316*c87b03e5Sespie      rtx *pinsn;
1317*c87b03e5Sespie      rtx valid_to;
1318*c87b03e5Sespie      int allow_hwreg;
1319*c87b03e5Sespie {
1320*c87b03e5Sespie   rtx p;
1321*c87b03e5Sespie 
1322*c87b03e5Sespie   for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1323*c87b03e5Sespie        p = PREV_INSN (p))
1324*c87b03e5Sespie     if (INSN_P (p))
1325*c87b03e5Sespie       {
1326*c87b03e5Sespie 	rtx set = single_set (p);
1327*c87b03e5Sespie 	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1328*c87b03e5Sespie 
1329*c87b03e5Sespie 	if (set && rtx_equal_p (x, SET_DEST (set)))
1330*c87b03e5Sespie 	  {
1331*c87b03e5Sespie 	    rtx src = SET_SRC (set);
1332*c87b03e5Sespie 
1333*c87b03e5Sespie 	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1334*c87b03e5Sespie 	      src = XEXP (note, 0);
1335*c87b03e5Sespie 
1336*c87b03e5Sespie 	    if ((valid_to == NULL_RTX
1337*c87b03e5Sespie 		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1338*c87b03e5Sespie 		/* Reject hard registers because we don't usually want
1339*c87b03e5Sespie 		   to use them; we'd rather use a pseudo.  */
1340*c87b03e5Sespie 		&& (! (GET_CODE (src) == REG
1341*c87b03e5Sespie 		      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1342*c87b03e5Sespie 	      {
1343*c87b03e5Sespie 		*pinsn = p;
1344*c87b03e5Sespie 		return src;
1345*c87b03e5Sespie 	      }
1346*c87b03e5Sespie 	  }
1347*c87b03e5Sespie 
1348*c87b03e5Sespie 	/* If X is set in some non-simple way, we don't have a value.  */
1349*c87b03e5Sespie 	if (reg_set_p (x, p))
1350*c87b03e5Sespie 	  break;
1351*c87b03e5Sespie       }
1352*c87b03e5Sespie 
1353*c87b03e5Sespie   return x;
1354*c87b03e5Sespie }
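/* Usage sketch (hypothetical, for illustration): to look up the value most
   recently assigned to the pseudo REG before INSN, rejecting hard-register
   sources:

	rtx scan = insn;
	rtx src = find_last_value (reg, &scan, NULL_RTX, 0);

   If SRC differs from REG, SCAN now points at the insn that made the
   assignment; otherwise no usable assignment was found.  */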
1355*c87b03e5Sespie 
1356*c87b03e5Sespie /* Return nonzero if register in range [REGNO, ENDREGNO)
1357*c87b03e5Sespie    appears either explicitly or implicitly in X
1358*c87b03e5Sespie    other than being stored into.
1359*c87b03e5Sespie 
1360*c87b03e5Sespie    References contained within the substructure at LOC do not count.
1361*c87b03e5Sespie    LOC may be zero, meaning don't ignore anything.  */
1362*c87b03e5Sespie 
1363*c87b03e5Sespie int
1364*c87b03e5Sespie refers_to_regno_p (regno, endregno, x, loc)
1365*c87b03e5Sespie      unsigned int regno, endregno;
1366*c87b03e5Sespie      rtx x;
1367*c87b03e5Sespie      rtx *loc;
1368*c87b03e5Sespie {
1369*c87b03e5Sespie   int i;
1370*c87b03e5Sespie   unsigned int x_regno;
1371*c87b03e5Sespie   RTX_CODE code;
1372*c87b03e5Sespie   const char *fmt;
1373*c87b03e5Sespie 
1374*c87b03e5Sespie  repeat:
1375*c87b03e5Sespie   /* The contents of a REG_NONNEG note are always zero, so we must come here
1376*c87b03e5Sespie      upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
1377*c87b03e5Sespie   if (x == 0)
1378*c87b03e5Sespie     return 0;
1379*c87b03e5Sespie 
1380*c87b03e5Sespie   code = GET_CODE (x);
1381*c87b03e5Sespie 
1382*c87b03e5Sespie   switch (code)
1383*c87b03e5Sespie     {
1384*c87b03e5Sespie     case REG:
1385*c87b03e5Sespie       x_regno = REGNO (x);
1386*c87b03e5Sespie 
1387*c87b03e5Sespie       /* If we are modifying the stack, frame, or argument pointer, it will
1388*c87b03e5Sespie 	 clobber a virtual register.  In fact, we could be more precise,
1389*c87b03e5Sespie 	 but it isn't worth it.  */
1390*c87b03e5Sespie       if ((x_regno == STACK_POINTER_REGNUM
1391*c87b03e5Sespie #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1392*c87b03e5Sespie 	   || x_regno == ARG_POINTER_REGNUM
1393*c87b03e5Sespie #endif
1394*c87b03e5Sespie 	   || x_regno == FRAME_POINTER_REGNUM)
1395*c87b03e5Sespie 	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1396*c87b03e5Sespie 	return 1;
1397*c87b03e5Sespie 
1398*c87b03e5Sespie       return (endregno > x_regno
1399*c87b03e5Sespie 	      && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1400*c87b03e5Sespie 				    ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1401*c87b03e5Sespie 				    : 1));
1402*c87b03e5Sespie 
1403*c87b03e5Sespie     case SUBREG:
1404*c87b03e5Sespie       /* If this is a SUBREG of a hard reg, we can see exactly which
1405*c87b03e5Sespie 	 registers are being modified.  Otherwise, handle normally.  */
1406*c87b03e5Sespie       if (GET_CODE (SUBREG_REG (x)) == REG
1407*c87b03e5Sespie 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1408*c87b03e5Sespie 	{
1409*c87b03e5Sespie 	  unsigned int inner_regno = subreg_regno (x);
1410*c87b03e5Sespie 	  unsigned int inner_endregno
1411*c87b03e5Sespie 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1412*c87b03e5Sespie 			     ? HARD_REGNO_NREGS (inner_regno, GET_MODE (x)) : 1);
1413*c87b03e5Sespie 
1414*c87b03e5Sespie 	  return endregno > inner_regno && regno < inner_endregno;
1415*c87b03e5Sespie 	}
1416*c87b03e5Sespie       break;
1417*c87b03e5Sespie 
1418*c87b03e5Sespie     case CLOBBER:
1419*c87b03e5Sespie     case SET:
1420*c87b03e5Sespie       if (&SET_DEST (x) != loc
1421*c87b03e5Sespie 	  /* Note setting a SUBREG counts as referring to the REG it is in for
1422*c87b03e5Sespie 	     a pseudo but not for hard registers since we can
1423*c87b03e5Sespie 	     treat each word individually.  */
1424*c87b03e5Sespie 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
1425*c87b03e5Sespie 	       && loc != &SUBREG_REG (SET_DEST (x))
1426*c87b03e5Sespie 	       && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1427*c87b03e5Sespie 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1428*c87b03e5Sespie 	       && refers_to_regno_p (regno, endregno,
1429*c87b03e5Sespie 				     SUBREG_REG (SET_DEST (x)), loc))
1430*c87b03e5Sespie 	      || (GET_CODE (SET_DEST (x)) != REG
1431*c87b03e5Sespie 		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1432*c87b03e5Sespie 	return 1;
1433*c87b03e5Sespie 
1434*c87b03e5Sespie       if (code == CLOBBER || loc == &SET_SRC (x))
1435*c87b03e5Sespie 	return 0;
1436*c87b03e5Sespie       x = SET_SRC (x);
1437*c87b03e5Sespie       goto repeat;
1438*c87b03e5Sespie 
1439*c87b03e5Sespie     default:
1440*c87b03e5Sespie       break;
1441*c87b03e5Sespie     }
1442*c87b03e5Sespie 
1443*c87b03e5Sespie   /* X does not match, so try its subexpressions.  */
1444*c87b03e5Sespie 
1445*c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
1446*c87b03e5Sespie   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1447*c87b03e5Sespie     {
1448*c87b03e5Sespie       if (fmt[i] == 'e' && loc != &XEXP (x, i))
1449*c87b03e5Sespie 	{
1450*c87b03e5Sespie 	  if (i == 0)
1451*c87b03e5Sespie 	    {
1452*c87b03e5Sespie 	      x = XEXP (x, 0);
1453*c87b03e5Sespie 	      goto repeat;
1454*c87b03e5Sespie 	    }
1455*c87b03e5Sespie 	  else
1456*c87b03e5Sespie 	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1457*c87b03e5Sespie 	      return 1;
1458*c87b03e5Sespie 	}
1459*c87b03e5Sespie       else if (fmt[i] == 'E')
1460*c87b03e5Sespie 	{
1461*c87b03e5Sespie 	  int j;
1462*c87b03e5Sespie 	  for (j = XVECLEN (x, i) - 1; j >=0; j--)
1463*c87b03e5Sespie 	    if (loc != &XVECEXP (x, i, j)
1464*c87b03e5Sespie 		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1465*c87b03e5Sespie 	      return 1;
1466*c87b03e5Sespie 	}
1467*c87b03e5Sespie     }
1468*c87b03e5Sespie   return 0;
1469*c87b03e5Sespie }
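/* Illustration (hypothetical caller): to ask whether hard registers 2 or 3
   are used anywhere in the body of INSN other than being stored into:

	if (refers_to_regno_p (2, 4, PATTERN (insn), (rtx *) 0))
	  ...

   The interval is half-open, so [2, 4) covers register numbers 2 and 3.  */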
1470*c87b03e5Sespie 
1471*c87b03e5Sespie /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1472*c87b03e5Sespie    we check if any register number in X conflicts with the relevant register
1473*c87b03e5Sespie    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1474*c87b03e5Sespie    contains a MEM (we don't bother checking for memory addresses that can't
1475*c87b03e5Sespie    conflict because we expect this to be a rare case).  */
1476*c87b03e5Sespie 
1477*c87b03e5Sespie int
1478*c87b03e5Sespie reg_overlap_mentioned_p (x, in)
1479*c87b03e5Sespie      rtx x, in;
1480*c87b03e5Sespie {
1481*c87b03e5Sespie   unsigned int regno, endregno;
1482*c87b03e5Sespie 
1483*c87b03e5Sespie   /* Overly conservative.  */
1484*c87b03e5Sespie   if (GET_CODE (x) == STRICT_LOW_PART
1485*c87b03e5Sespie       || GET_CODE (x) == ZERO_EXTRACT
1486*c87b03e5Sespie       || GET_CODE (x) == SIGN_EXTRACT)
1487*c87b03e5Sespie     x = XEXP (x, 0);
1488*c87b03e5Sespie 
1489*c87b03e5Sespie   /* If either argument is a constant, then modifying X can not affect IN.  */
1490*c87b03e5Sespie   if (CONSTANT_P (x) || CONSTANT_P (in))
1491*c87b03e5Sespie     return 0;
1492*c87b03e5Sespie 
1493*c87b03e5Sespie   switch (GET_CODE (x))
1494*c87b03e5Sespie     {
1495*c87b03e5Sespie     case SUBREG:
1496*c87b03e5Sespie       regno = REGNO (SUBREG_REG (x));
1497*c87b03e5Sespie       if (regno < FIRST_PSEUDO_REGISTER)
1498*c87b03e5Sespie 	regno = subreg_regno (x);
1499*c87b03e5Sespie       goto do_reg;
1500*c87b03e5Sespie 
1501*c87b03e5Sespie     case REG:
1502*c87b03e5Sespie       regno = REGNO (x);
1503*c87b03e5Sespie     do_reg:
1504*c87b03e5Sespie       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1505*c87b03e5Sespie 			  ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1506*c87b03e5Sespie       return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1507*c87b03e5Sespie 
1508*c87b03e5Sespie     case MEM:
1509*c87b03e5Sespie       {
1510*c87b03e5Sespie 	const char *fmt;
1511*c87b03e5Sespie 	int i;
1512*c87b03e5Sespie 
1513*c87b03e5Sespie 	if (GET_CODE (in) == MEM)
1514*c87b03e5Sespie 	  return 1;
1515*c87b03e5Sespie 
1516*c87b03e5Sespie 	fmt = GET_RTX_FORMAT (GET_CODE (in));
1517*c87b03e5Sespie 	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1518*c87b03e5Sespie 	  if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1519*c87b03e5Sespie 	    return 1;
1520*c87b03e5Sespie 
1521*c87b03e5Sespie 	return 0;
1522*c87b03e5Sespie       }
1523*c87b03e5Sespie 
1524*c87b03e5Sespie     case SCRATCH:
1525*c87b03e5Sespie     case PC:
1526*c87b03e5Sespie     case CC0:
1527*c87b03e5Sespie       return reg_mentioned_p (x, in);
1528*c87b03e5Sespie 
1529*c87b03e5Sespie     case PARALLEL:
1530*c87b03e5Sespie       {
1531*c87b03e5Sespie 	int i;
1532*c87b03e5Sespie 
1533*c87b03e5Sespie 	/* If any register in here overlaps IN, we return true.  */
1534*c87b03e5Sespie 	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1535*c87b03e5Sespie 	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
1536*c87b03e5Sespie 	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1537*c87b03e5Sespie 	    return 1;
1538*c87b03e5Sespie 	return 0;
1539*c87b03e5Sespie       }
1540*c87b03e5Sespie 
1541*c87b03e5Sespie     default:
1542*c87b03e5Sespie       break;
1543*c87b03e5Sespie     }
1544*c87b03e5Sespie 
1545*c87b03e5Sespie   abort ();
1546*c87b03e5Sespie }
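/* Illustration (hypothetical): before substituting NEW into the sources of
   an insn, a pass might verify that the insn does not also overwrite NEW:

	if (! reg_overlap_mentioned_p (SET_DEST (set), new))
	  ... substituting NEW into SET_SRC (set) cannot be invalidated
	      by this insn's own store ...
   */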
1547*c87b03e5Sespie 
1548*c87b03e5Sespie /* Return the last value to which REG was set prior to INSN.  If we can't
1549*c87b03e5Sespie    find it easily, return 0.
1550*c87b03e5Sespie 
1551*c87b03e5Sespie    We only return a REG, SUBREG, or constant because it is too hard to
1552*c87b03e5Sespie    check if a MEM remains unchanged.  */
1553*c87b03e5Sespie 
1554*c87b03e5Sespie rtx
1555*c87b03e5Sespie reg_set_last (x, insn)
1556*c87b03e5Sespie      rtx x;
1557*c87b03e5Sespie      rtx insn;
1558*c87b03e5Sespie {
1559*c87b03e5Sespie   rtx orig_insn = insn;
1560*c87b03e5Sespie 
1561*c87b03e5Sespie   /* Scan backwards from INSN.  Stop when we reach a CODE_LABEL, or at a
1562*c87b03e5Sespie      CALL_INSN when X may be a hard register (a call can clobber any hard
1563*c87b03e5Sespie      register; the <= test below is conservative).
1564*c87b03e5Sespie 
1565*c87b03e5Sespie      If we find a set of X, ensure that its SET_SRC remains unchanged
1566*c87b03e5Sespie      between that insn and ORIG_INSN.  */
1567*c87b03e5Sespie 
1569*c87b03e5Sespie   for (;
1570*c87b03e5Sespie        insn && GET_CODE (insn) != CODE_LABEL
1571*c87b03e5Sespie        && ! (GET_CODE (insn) == CALL_INSN
1572*c87b03e5Sespie 	     && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1573*c87b03e5Sespie        insn = PREV_INSN (insn))
1574*c87b03e5Sespie     if (INSN_P (insn))
1575*c87b03e5Sespie       {
1576*c87b03e5Sespie 	rtx set = set_of (x, insn);
1577*c87b03e5Sespie 	/* OK, this insn modifies our register.  See if we understand it.  */
1578*c87b03e5Sespie 	if (set)
1579*c87b03e5Sespie 	  {
1580*c87b03e5Sespie 	    rtx last_value;
1581*c87b03e5Sespie 	    if (GET_CODE (set) != SET || SET_DEST (set) != x)
1582*c87b03e5Sespie 	      return 0;
1583*c87b03e5Sespie 	    last_value = SET_SRC (set);
1584*c87b03e5Sespie 	    if (CONSTANT_P (last_value)
1585*c87b03e5Sespie 		|| ((GET_CODE (last_value) == REG
1586*c87b03e5Sespie 		     || GET_CODE (last_value) == SUBREG)
1587*c87b03e5Sespie 		    && ! reg_set_between_p (last_value,
1588*c87b03e5Sespie 					    insn, orig_insn)))
1589*c87b03e5Sespie 	      return last_value;
1590*c87b03e5Sespie 	    else
1591*c87b03e5Sespie 	      return 0;
1592*c87b03e5Sespie 	  }
1593*c87b03e5Sespie       }
1594*c87b03e5Sespie 
1595*c87b03e5Sespie   return 0;
1596*c87b03e5Sespie }
1597*c87b03e5Sespie 
1598*c87b03e5Sespie /* Call FUN on each register or MEM that is stored into or clobbered by X.
1599*c87b03e5Sespie    (X would be the pattern of an insn).
1600*c87b03e5Sespie    FUN receives two arguments:
1601*c87b03e5Sespie      the REG, MEM, CC0 or PC being stored in or clobbered,
1602*c87b03e5Sespie      the SET or CLOBBER rtx that does the store.
1603*c87b03e5Sespie 
1604*c87b03e5Sespie   If the item being stored in or clobbered is a SUBREG of a hard register,
1605*c87b03e5Sespie   the SUBREG will be passed.  */
1606*c87b03e5Sespie 
1607*c87b03e5Sespie void
1608*c87b03e5Sespie note_stores (x, fun, data)
1609*c87b03e5Sespie      rtx x;
1610*c87b03e5Sespie      void (*fun) PARAMS ((rtx, rtx, void *));
1611*c87b03e5Sespie      void *data;
1612*c87b03e5Sespie {
1613*c87b03e5Sespie   int i;
1614*c87b03e5Sespie 
1615*c87b03e5Sespie   if (GET_CODE (x) == COND_EXEC)
1616*c87b03e5Sespie     x = COND_EXEC_CODE (x);
1617*c87b03e5Sespie 
1618*c87b03e5Sespie   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1619*c87b03e5Sespie     {
1620*c87b03e5Sespie       rtx dest = SET_DEST (x);
1621*c87b03e5Sespie 
1622*c87b03e5Sespie       while ((GET_CODE (dest) == SUBREG
1623*c87b03e5Sespie 	      && (GET_CODE (SUBREG_REG (dest)) != REG
1624*c87b03e5Sespie 		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1625*c87b03e5Sespie 	     || GET_CODE (dest) == ZERO_EXTRACT
1626*c87b03e5Sespie 	     || GET_CODE (dest) == SIGN_EXTRACT
1627*c87b03e5Sespie 	     || GET_CODE (dest) == STRICT_LOW_PART)
1628*c87b03e5Sespie 	dest = XEXP (dest, 0);
1629*c87b03e5Sespie 
1630*c87b03e5Sespie       /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1631*c87b03e5Sespie 	 each of whose first operand is a register.  */
1632*c87b03e5Sespie       if (GET_CODE (dest) == PARALLEL)
1633*c87b03e5Sespie 	{
1634*c87b03e5Sespie 	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1635*c87b03e5Sespie 	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1636*c87b03e5Sespie 	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1637*c87b03e5Sespie 	}
1638*c87b03e5Sespie       else
1639*c87b03e5Sespie 	(*fun) (dest, x, data);
1640*c87b03e5Sespie     }
1641*c87b03e5Sespie 
1642*c87b03e5Sespie   else if (GET_CODE (x) == PARALLEL)
1643*c87b03e5Sespie     for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1644*c87b03e5Sespie       note_stores (XVECEXP (x, 0, i), fun, data);
1645*c87b03e5Sespie }
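/* Usage sketch (hypothetical callback, for illustration only): counting
   the registers stored into or clobbered by one insn.

	static void count_reg_stores PARAMS ((rtx, rtx, void *));

	static void
	count_reg_stores (dest, setter, data)
	     rtx dest;
	     rtx setter ATTRIBUTE_UNUSED;
	     void *data;
	{
	  if (GET_CODE (dest) == REG)
	    (*(int *) data)++;
	}

	...
	int count = 0;
	note_stores (PATTERN (insn), count_reg_stores, &count);
   */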
1646*c87b03e5Sespie 
1647*c87b03e5Sespie /* Like note_stores, but call FUN for each expression that is being
1648*c87b03e5Sespie    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1649*c87b03e5Sespie    FUN for each expression, not any interior subexpressions.  FUN receives a
1650*c87b03e5Sespie    pointer to the expression and the DATA passed to this function.
1651*c87b03e5Sespie 
1652*c87b03e5Sespie    Note that this is not quite the same test as that done in reg_referenced_p
1653*c87b03e5Sespie    since that considers something as being referenced if it is being
1654*c87b03e5Sespie    partially set, while we do not.  */
1655*c87b03e5Sespie 
1656*c87b03e5Sespie void
1657*c87b03e5Sespie note_uses (pbody, fun, data)
1658*c87b03e5Sespie      rtx *pbody;
1659*c87b03e5Sespie      void (*fun) PARAMS ((rtx *, void *));
1660*c87b03e5Sespie      void *data;
1661*c87b03e5Sespie {
1662*c87b03e5Sespie   rtx body = *pbody;
1663*c87b03e5Sespie   int i;
1664*c87b03e5Sespie 
1665*c87b03e5Sespie   switch (GET_CODE (body))
1666*c87b03e5Sespie     {
1667*c87b03e5Sespie     case COND_EXEC:
1668*c87b03e5Sespie       (*fun) (&COND_EXEC_TEST (body), data);
1669*c87b03e5Sespie       note_uses (&COND_EXEC_CODE (body), fun, data);
1670*c87b03e5Sespie       return;
1671*c87b03e5Sespie 
1672*c87b03e5Sespie     case PARALLEL:
1673*c87b03e5Sespie       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1674*c87b03e5Sespie 	note_uses (&XVECEXP (body, 0, i), fun, data);
1675*c87b03e5Sespie       return;
1676*c87b03e5Sespie 
1677*c87b03e5Sespie     case USE:
1678*c87b03e5Sespie       (*fun) (&XEXP (body, 0), data);
1679*c87b03e5Sespie       return;
1680*c87b03e5Sespie 
1681*c87b03e5Sespie     case ASM_OPERANDS:
1682*c87b03e5Sespie       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1683*c87b03e5Sespie 	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1684*c87b03e5Sespie       return;
1685*c87b03e5Sespie 
1686*c87b03e5Sespie     case TRAP_IF:
1687*c87b03e5Sespie       (*fun) (&TRAP_CONDITION (body), data);
1688*c87b03e5Sespie       return;
1689*c87b03e5Sespie 
1690*c87b03e5Sespie     case PREFETCH:
1691*c87b03e5Sespie       (*fun) (&XEXP (body, 0), data);
1692*c87b03e5Sespie       return;
1693*c87b03e5Sespie 
1694*c87b03e5Sespie     case UNSPEC:
1695*c87b03e5Sespie     case UNSPEC_VOLATILE:
1696*c87b03e5Sespie       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1697*c87b03e5Sespie 	(*fun) (&XVECEXP (body, 0, i), data);
1698*c87b03e5Sespie       return;
1699*c87b03e5Sespie 
1700*c87b03e5Sespie     case CLOBBER:
1701*c87b03e5Sespie       if (GET_CODE (XEXP (body, 0)) == MEM)
1702*c87b03e5Sespie 	(*fun) (&XEXP (XEXP (body, 0), 0), data);
1703*c87b03e5Sespie       return;
1704*c87b03e5Sespie 
1705*c87b03e5Sespie     case SET:
1706*c87b03e5Sespie       {
1707*c87b03e5Sespie 	rtx dest = SET_DEST (body);
1708*c87b03e5Sespie 
1709*c87b03e5Sespie 	/* For sets we call FUN on everything in the source, plus registers in
1710*c87b03e5Sespie 	   the memory expression of the destination and the operands of a ZERO_EXTRACT.  */
1711*c87b03e5Sespie 	(*fun) (&SET_SRC (body), data);
1712*c87b03e5Sespie 
1713*c87b03e5Sespie 	if (GET_CODE (dest) == ZERO_EXTRACT)
1714*c87b03e5Sespie 	  {
1715*c87b03e5Sespie 	    (*fun) (&XEXP (dest, 1), data);
1716*c87b03e5Sespie 	    (*fun) (&XEXP (dest, 2), data);
1717*c87b03e5Sespie 	  }
1718*c87b03e5Sespie 
1719*c87b03e5Sespie 	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1720*c87b03e5Sespie 	  dest = XEXP (dest, 0);
1721*c87b03e5Sespie 
1722*c87b03e5Sespie 	if (GET_CODE (dest) == MEM)
1723*c87b03e5Sespie 	  (*fun) (&XEXP (dest, 0), data);
1724*c87b03e5Sespie       }
1725*c87b03e5Sespie       return;
1726*c87b03e5Sespie 
1727*c87b03e5Sespie     default:
1728*c87b03e5Sespie       /* All the other possibilities never store.  */
1729*c87b03e5Sespie       (*fun) (pbody, data);
1730*c87b03e5Sespie       return;
1731*c87b03e5Sespie     }
1732*c87b03e5Sespie }
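/* Usage sketch (hypothetical callback, illustration only): rewriting every
   use of one register into another.  FUN is handed the address of each used
   expression, so the callback may scan or rewrite inside it.  DATA points to
   a two-element array: element 0 is the rtx to replace, element 1 its
   replacement.

	static void substitute_use PARAMS ((rtx *, void *));

	static void
	substitute_use (loc, data)
	     rtx *loc;
	     void *data;
	{
	  rtx *pair = (rtx *) data;
	  *loc = replace_rtx (*loc, pair[0], pair[1]);
	}

	...
	rtx pair[2];
	pair[0] = from, pair[1] = to;
	note_uses (&PATTERN (insn), substitute_use, pair);
   */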
1733*c87b03e5Sespie 
1734*c87b03e5Sespie /* Return nonzero if X's old contents don't survive after INSN.
1735*c87b03e5Sespie    This will be true if X is (cc0) or if X is a register and
1736*c87b03e5Sespie    X dies in INSN or because INSN entirely sets X.
1737*c87b03e5Sespie 
1738*c87b03e5Sespie    "Entirely set" means set directly and not through a SUBREG,
1739*c87b03e5Sespie    ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1740*c87b03e5Sespie    Likewise, REG_INC does not count.
1741*c87b03e5Sespie 
1742*c87b03e5Sespie    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1743*c87b03e5Sespie    but for this use that makes no difference, since regs don't overlap
1744*c87b03e5Sespie    during their lifetimes.  Therefore, this function may be used
1745*c87b03e5Sespie    at any time after deaths have been computed (in flow.c).
1746*c87b03e5Sespie 
1747*c87b03e5Sespie    If REG is a hard reg that occupies multiple machine registers, this
1748*c87b03e5Sespie    function will only return 1 if each of those registers will be replaced
1749*c87b03e5Sespie    by INSN.  */
1750*c87b03e5Sespie 
1751*c87b03e5Sespie int
1752*c87b03e5Sespie dead_or_set_p (insn, x)
1753*c87b03e5Sespie      rtx insn;
1754*c87b03e5Sespie      rtx x;
1755*c87b03e5Sespie {
1756*c87b03e5Sespie   unsigned int regno, last_regno;
1757*c87b03e5Sespie   unsigned int i;
1758*c87b03e5Sespie 
1759*c87b03e5Sespie   /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1760*c87b03e5Sespie   if (GET_CODE (x) == CC0)
1761*c87b03e5Sespie     return 1;
1762*c87b03e5Sespie 
1763*c87b03e5Sespie   if (GET_CODE (x) != REG)
1764*c87b03e5Sespie     abort ();
1765*c87b03e5Sespie 
1766*c87b03e5Sespie   regno = REGNO (x);
1767*c87b03e5Sespie   last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1768*c87b03e5Sespie 		: regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1769*c87b03e5Sespie 
1770*c87b03e5Sespie   for (i = regno; i <= last_regno; i++)
1771*c87b03e5Sespie     if (! dead_or_set_regno_p (insn, i))
1772*c87b03e5Sespie       return 0;
1773*c87b03e5Sespie 
1774*c87b03e5Sespie   return 1;
1775*c87b03e5Sespie }
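/* Illustration (hypothetical caller): a peephole-style transformation that
   wants to reuse REG as a temporary could check

	if (dead_or_set_p (insn, reg))
	  ... the old contents of REG need not be preserved past INSN ...

   which is only meaningful once REG_DEAD notes have been computed.  */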
1776*c87b03e5Sespie 
1777*c87b03e5Sespie /* Utility function for dead_or_set_p to check an individual register.  Also
1778*c87b03e5Sespie    called from flow.c.  */
1779*c87b03e5Sespie 
1780*c87b03e5Sespie int
1781*c87b03e5Sespie dead_or_set_regno_p (insn, test_regno)
1782*c87b03e5Sespie      rtx insn;
1783*c87b03e5Sespie      unsigned int test_regno;
1784*c87b03e5Sespie {
1785*c87b03e5Sespie   unsigned int regno, endregno;
1786*c87b03e5Sespie   rtx pattern;
1787*c87b03e5Sespie 
1788*c87b03e5Sespie   /* See if there is a death note for something that includes TEST_REGNO.  */
1789*c87b03e5Sespie   if (find_regno_note (insn, REG_DEAD, test_regno))
1790*c87b03e5Sespie     return 1;
1791*c87b03e5Sespie 
1792*c87b03e5Sespie   if (GET_CODE (insn) == CALL_INSN
1793*c87b03e5Sespie       && find_regno_fusage (insn, CLOBBER, test_regno))
1794*c87b03e5Sespie     return 1;
1795*c87b03e5Sespie 
1796*c87b03e5Sespie   pattern = PATTERN (insn);
1797*c87b03e5Sespie 
1798*c87b03e5Sespie   if (GET_CODE (pattern) == COND_EXEC)
1799*c87b03e5Sespie     pattern = COND_EXEC_CODE (pattern);
1800*c87b03e5Sespie 
1801*c87b03e5Sespie   if (GET_CODE (pattern) == SET)
1802*c87b03e5Sespie     {
1803*c87b03e5Sespie       rtx dest = SET_DEST (pattern);
1804*c87b03e5Sespie 
1805*c87b03e5Sespie       /* A value is totally replaced if it is the destination or the
1806*c87b03e5Sespie 	 destination is a SUBREG of REGNO that does not change the number of
1807*c87b03e5Sespie 	 words in it.  */
1808*c87b03e5Sespie       if (GET_CODE (dest) == SUBREG
1809*c87b03e5Sespie 	  && (((GET_MODE_SIZE (GET_MODE (dest))
1810*c87b03e5Sespie 		+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1811*c87b03e5Sespie 	      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1812*c87b03e5Sespie 		   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1813*c87b03e5Sespie 	dest = SUBREG_REG (dest);
1814*c87b03e5Sespie 
1815*c87b03e5Sespie       if (GET_CODE (dest) != REG)
1816*c87b03e5Sespie 	return 0;
1817*c87b03e5Sespie 
1818*c87b03e5Sespie       regno = REGNO (dest);
1819*c87b03e5Sespie       endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1820*c87b03e5Sespie 		  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1821*c87b03e5Sespie 
1822*c87b03e5Sespie       return (test_regno >= regno && test_regno < endregno);
1823*c87b03e5Sespie     }
1824*c87b03e5Sespie   else if (GET_CODE (pattern) == PARALLEL)
1825*c87b03e5Sespie     {
1826*c87b03e5Sespie       int i;
1827*c87b03e5Sespie 
1828*c87b03e5Sespie       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1829*c87b03e5Sespie 	{
1830*c87b03e5Sespie 	  rtx body = XVECEXP (pattern, 0, i);
1831*c87b03e5Sespie 
1832*c87b03e5Sespie 	  if (GET_CODE (body) == COND_EXEC)
1833*c87b03e5Sespie 	    body = COND_EXEC_CODE (body);
1834*c87b03e5Sespie 
1835*c87b03e5Sespie 	  if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1836*c87b03e5Sespie 	    {
1837*c87b03e5Sespie 	      rtx dest = SET_DEST (body);
1838*c87b03e5Sespie 
1839*c87b03e5Sespie 	      if (GET_CODE (dest) == SUBREG
1840*c87b03e5Sespie 		  && (((GET_MODE_SIZE (GET_MODE (dest))
1841*c87b03e5Sespie 			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1842*c87b03e5Sespie 		      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1843*c87b03e5Sespie 			   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1844*c87b03e5Sespie 		dest = SUBREG_REG (dest);
1845*c87b03e5Sespie 
1846*c87b03e5Sespie 	      if (GET_CODE (dest) != REG)
1847*c87b03e5Sespie 		continue;
1848*c87b03e5Sespie 
1849*c87b03e5Sespie 	      regno = REGNO (dest);
1850*c87b03e5Sespie 	      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1851*c87b03e5Sespie 			  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1852*c87b03e5Sespie 
1853*c87b03e5Sespie 	      if (test_regno >= regno && test_regno < endregno)
1854*c87b03e5Sespie 		return 1;
1855*c87b03e5Sespie 	    }
1856*c87b03e5Sespie 	}
1857*c87b03e5Sespie     }
1858*c87b03e5Sespie 
1859*c87b03e5Sespie   return 0;
1860*c87b03e5Sespie }
1861*c87b03e5Sespie 
1862*c87b03e5Sespie /* Return the reg-note of kind KIND in insn INSN, if there is one.
1863*c87b03e5Sespie    If DATUM is nonzero, look for one whose datum is DATUM.  */
1864*c87b03e5Sespie 
1865*c87b03e5Sespie rtx
1866*c87b03e5Sespie find_reg_note (insn, kind, datum)
1867*c87b03e5Sespie      rtx insn;
1868*c87b03e5Sespie      enum reg_note kind;
1869*c87b03e5Sespie      rtx datum;
1870*c87b03e5Sespie {
1871*c87b03e5Sespie   rtx link;
1872*c87b03e5Sespie 
1873*c87b03e5Sespie   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1874*c87b03e5Sespie   if (! INSN_P (insn))
1875*c87b03e5Sespie     return 0;
1876*c87b03e5Sespie 
1877*c87b03e5Sespie   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1878*c87b03e5Sespie     if (REG_NOTE_KIND (link) == kind
1879*c87b03e5Sespie 	&& (datum == 0 || datum == XEXP (link, 0)))
1880*c87b03e5Sespie       return link;
1881*c87b03e5Sespie   return 0;
1882*c87b03e5Sespie }
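/* Illustration (hypothetical): asking whether INSN is known to compute a
   constant, via its REG_EQUAL note if one was recorded:

	rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

	if (note != 0 && CONSTANT_P (XEXP (note, 0)))
	  ... XEXP (note, 0) is an expression equivalent to the SET_SRC ...
   */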
1883*c87b03e5Sespie 
1884*c87b03e5Sespie /* Return the reg-note of kind KIND in insn INSN which applies to register
1885*c87b03e5Sespie    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1886*c87b03e5Sespie    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1887*c87b03e5Sespie    it might be the case that the note overlaps REGNO.  */
1888*c87b03e5Sespie 
1889*c87b03e5Sespie rtx
1890*c87b03e5Sespie find_regno_note (insn, kind, regno)
1891*c87b03e5Sespie      rtx insn;
1892*c87b03e5Sespie      enum reg_note kind;
1893*c87b03e5Sespie      unsigned int regno;
1894*c87b03e5Sespie {
1895*c87b03e5Sespie   rtx link;
1896*c87b03e5Sespie 
1897*c87b03e5Sespie   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1898*c87b03e5Sespie   if (! INSN_P (insn))
1899*c87b03e5Sespie     return 0;
1900*c87b03e5Sespie 
1901*c87b03e5Sespie   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1902*c87b03e5Sespie     if (REG_NOTE_KIND (link) == kind
1903*c87b03e5Sespie 	/* Verify that it is a register, so that scratch and MEM won't cause a
1904*c87b03e5Sespie 	   problem here.  */
1905*c87b03e5Sespie 	&& GET_CODE (XEXP (link, 0)) == REG
1906*c87b03e5Sespie 	&& REGNO (XEXP (link, 0)) <= regno
1907*c87b03e5Sespie 	&& ((REGNO (XEXP (link, 0))
1908*c87b03e5Sespie 	     + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1909*c87b03e5Sespie 		: HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1910*c87b03e5Sespie 				    GET_MODE (XEXP (link, 0)))))
1911*c87b03e5Sespie 	    > regno))
1912*c87b03e5Sespie       return link;
1913*c87b03e5Sespie   return 0;
1914*c87b03e5Sespie }
1915*c87b03e5Sespie 
1916*c87b03e5Sespie /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1917*c87b03e5Sespie    has such a note.  */
1918*c87b03e5Sespie 
1919*c87b03e5Sespie rtx
1920*c87b03e5Sespie find_reg_equal_equiv_note (insn)
1921*c87b03e5Sespie      rtx insn;
1922*c87b03e5Sespie {
1923*c87b03e5Sespie   rtx note;
1924*c87b03e5Sespie 
1925*c87b03e5Sespie   if (single_set (insn) == 0)
1926*c87b03e5Sespie     return 0;
1927*c87b03e5Sespie   else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1928*c87b03e5Sespie     return note;
1929*c87b03e5Sespie   else
1930*c87b03e5Sespie     return find_reg_note (insn, REG_EQUAL, NULL_RTX);
1931*c87b03e5Sespie }
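/* Illustration (hypothetical): the usual way to ask "what simpler
   expression is this single-set insn known to compute?":

	rtx set = single_set (insn);
	rtx note = find_reg_equal_equiv_note (insn);
	rtx val = note ? XEXP (note, 0) : (set ? SET_SRC (set) : NULL_RTX);
   */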
1932*c87b03e5Sespie 
1933*c87b03e5Sespie /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1934*c87b03e5Sespie    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1935*c87b03e5Sespie 
1936*c87b03e5Sespie int
1937*c87b03e5Sespie find_reg_fusage (insn, code, datum)
1938*c87b03e5Sespie      rtx insn;
1939*c87b03e5Sespie      enum rtx_code code;
1940*c87b03e5Sespie      rtx datum;
1941*c87b03e5Sespie {
1942*c87b03e5Sespie   /* If it's not a CALL_INSN, it can't possibly have a
1943*c87b03e5Sespie      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1944*c87b03e5Sespie   if (GET_CODE (insn) != CALL_INSN)
1945*c87b03e5Sespie     return 0;
1946*c87b03e5Sespie 
1947*c87b03e5Sespie   if (! datum)
1948*c87b03e5Sespie     abort ();
1949*c87b03e5Sespie 
1950*c87b03e5Sespie   if (GET_CODE (datum) != REG)
1951*c87b03e5Sespie     {
1952*c87b03e5Sespie       rtx link;
1953*c87b03e5Sespie 
1954*c87b03e5Sespie       for (link = CALL_INSN_FUNCTION_USAGE (insn);
1955*c87b03e5Sespie 	   link;
1956*c87b03e5Sespie 	   link = XEXP (link, 1))
1957*c87b03e5Sespie 	if (GET_CODE (XEXP (link, 0)) == code
1958*c87b03e5Sespie 	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1959*c87b03e5Sespie 	  return 1;
1960*c87b03e5Sespie     }
1961*c87b03e5Sespie   else
1962*c87b03e5Sespie     {
1963*c87b03e5Sespie       unsigned int regno = REGNO (datum);
1964*c87b03e5Sespie 
1965*c87b03e5Sespie       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1966*c87b03e5Sespie 	 to pseudo registers, so don't bother checking.  */
1967*c87b03e5Sespie 
1968*c87b03e5Sespie       if (regno < FIRST_PSEUDO_REGISTER)
1969*c87b03e5Sespie 	{
1970*c87b03e5Sespie 	  unsigned int end_regno
1971*c87b03e5Sespie 	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1972*c87b03e5Sespie 	  unsigned int i;
1973*c87b03e5Sespie 
1974*c87b03e5Sespie 	  for (i = regno; i < end_regno; i++)
1975*c87b03e5Sespie 	    if (find_regno_fusage (insn, code, i))
1976*c87b03e5Sespie 	      return 1;
1977*c87b03e5Sespie 	}
1978*c87b03e5Sespie     }
1979*c87b03e5Sespie 
1980*c87b03e5Sespie   return 0;
1981*c87b03e5Sespie }
1982*c87b03e5Sespie 
1983*c87b03e5Sespie /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1984*c87b03e5Sespie    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1985*c87b03e5Sespie 
1986*c87b03e5Sespie int
1987*c87b03e5Sespie find_regno_fusage (insn, code, regno)
1988*c87b03e5Sespie      rtx insn;
1989*c87b03e5Sespie      enum rtx_code code;
1990*c87b03e5Sespie      unsigned int regno;
1991*c87b03e5Sespie {
1992*c87b03e5Sespie   rtx link;
1993*c87b03e5Sespie 
1994*c87b03e5Sespie   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1995*c87b03e5Sespie      to pseudo registers, so don't bother checking.  */
1996*c87b03e5Sespie 
1997*c87b03e5Sespie   if (regno >= FIRST_PSEUDO_REGISTER
1998*c87b03e5Sespie       || GET_CODE (insn) != CALL_INSN)
1999*c87b03e5Sespie     return 0;
2000*c87b03e5Sespie 
2001*c87b03e5Sespie   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2002*c87b03e5Sespie     {
2003*c87b03e5Sespie       unsigned int regnote;
2004*c87b03e5Sespie       rtx op, reg;
2005*c87b03e5Sespie 
2006*c87b03e5Sespie       if (GET_CODE (op = XEXP (link, 0)) == code
2007*c87b03e5Sespie 	  && GET_CODE (reg = XEXP (op, 0)) == REG
2008*c87b03e5Sespie 	  && (regnote = REGNO (reg)) <= regno
2009*c87b03e5Sespie 	  && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2010*c87b03e5Sespie 	return 1;
2011*c87b03e5Sespie     }
2012*c87b03e5Sespie 
2013*c87b03e5Sespie   return 0;
2014*c87b03e5Sespie }
2015*c87b03e5Sespie 
2016*c87b03e5Sespie /* Return true if INSN is a call to a pure function.  */
2017*c87b03e5Sespie 
2018*c87b03e5Sespie int
2019*c87b03e5Sespie pure_call_p (insn)
2020*c87b03e5Sespie      rtx insn;
2021*c87b03e5Sespie {
2022*c87b03e5Sespie   rtx link;
2023*c87b03e5Sespie 
2024*c87b03e5Sespie   if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2025*c87b03e5Sespie     return 0;
2026*c87b03e5Sespie 
2027*c87b03e5Sespie   /* Look for the note that differentiates const and pure functions.  */
2028*c87b03e5Sespie   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2029*c87b03e5Sespie     {
2030*c87b03e5Sespie       rtx u, m;
2031*c87b03e5Sespie 
2032*c87b03e5Sespie       if (GET_CODE (u = XEXP (link, 0)) == USE
2033*c87b03e5Sespie 	  && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2034*c87b03e5Sespie 	  && GET_CODE (XEXP (m, 0)) == SCRATCH)
2035*c87b03e5Sespie 	return 1;
2036*c87b03e5Sespie     }
2037*c87b03e5Sespie 
2038*c87b03e5Sespie   return 0;
2039*c87b03e5Sespie }
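/* For reference, the marker tested for above is the entry
   (use (mem:BLK (scratch))) in CALL_INSN_FUNCTION_USAGE: a call that is
   CONST_OR_PURE_CALL_P and carries such an entry reads memory (pure),
   while one without it is treated as const.  This restates the convention
   as read from the loop above.  */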
2040*c87b03e5Sespie 
2041*c87b03e5Sespie /* Remove register note NOTE from the REG_NOTES of INSN.  */
2042*c87b03e5Sespie 
2043*c87b03e5Sespie void
2044*c87b03e5Sespie remove_note (insn, note)
2045*c87b03e5Sespie      rtx insn;
2046*c87b03e5Sespie      rtx note;
2047*c87b03e5Sespie {
2048*c87b03e5Sespie   rtx link;
2049*c87b03e5Sespie 
2050*c87b03e5Sespie   if (note == NULL_RTX)
2051*c87b03e5Sespie     return;
2052*c87b03e5Sespie 
2053*c87b03e5Sespie   if (REG_NOTES (insn) == note)
2054*c87b03e5Sespie     {
2055*c87b03e5Sespie       REG_NOTES (insn) = XEXP (note, 1);
2056*c87b03e5Sespie       return;
2057*c87b03e5Sespie     }
2058*c87b03e5Sespie 
2059*c87b03e5Sespie   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2060*c87b03e5Sespie     if (XEXP (link, 1) == note)
2061*c87b03e5Sespie       {
2062*c87b03e5Sespie 	XEXP (link, 1) = XEXP (note, 1);
2063*c87b03e5Sespie 	return;
2064*c87b03e5Sespie       }
2065*c87b03e5Sespie 
2066*c87b03e5Sespie   abort ();
2067*c87b03e5Sespie }
2068*c87b03e5Sespie 
2069*c87b03e5Sespie /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2070*c87b03e5Sespie    return 1 if it is found.  A simple equality test is used to determine if
2071*c87b03e5Sespie    NODE matches.  */
2072*c87b03e5Sespie 
2073*c87b03e5Sespie int
2074*c87b03e5Sespie in_expr_list_p (listp, node)
2075*c87b03e5Sespie      rtx listp;
2076*c87b03e5Sespie      rtx node;
2077*c87b03e5Sespie {
2078*c87b03e5Sespie   rtx x;
2079*c87b03e5Sespie 
2080*c87b03e5Sespie   for (x = listp; x; x = XEXP (x, 1))
2081*c87b03e5Sespie     if (node == XEXP (x, 0))
2082*c87b03e5Sespie       return 1;
2083*c87b03e5Sespie 
2084*c87b03e5Sespie   return 0;
2085*c87b03e5Sespie }
2086*c87b03e5Sespie 
2087*c87b03e5Sespie /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2088*c87b03e5Sespie    remove that entry from the list if it is found.
2089*c87b03e5Sespie 
2090*c87b03e5Sespie    A simple equality test is used to determine if NODE matches.  */
2091*c87b03e5Sespie 
2092*c87b03e5Sespie void
2093*c87b03e5Sespie remove_node_from_expr_list (node, listp)
2094*c87b03e5Sespie      rtx node;
2095*c87b03e5Sespie      rtx *listp;
2096*c87b03e5Sespie {
2097*c87b03e5Sespie   rtx temp = *listp;
2098*c87b03e5Sespie   rtx prev = NULL_RTX;
2099*c87b03e5Sespie 
2100*c87b03e5Sespie   while (temp)
2101*c87b03e5Sespie     {
2102*c87b03e5Sespie       if (node == XEXP (temp, 0))
2103*c87b03e5Sespie 	{
2104*c87b03e5Sespie 	  /* Splice the node out of the list.  */
2105*c87b03e5Sespie 	  if (prev)
2106*c87b03e5Sespie 	    XEXP (prev, 1) = XEXP (temp, 1);
2107*c87b03e5Sespie 	  else
2108*c87b03e5Sespie 	    *listp = XEXP (temp, 1);
2109*c87b03e5Sespie 
2110*c87b03e5Sespie 	  return;
2111*c87b03e5Sespie 	}
2112*c87b03e5Sespie 
2113*c87b03e5Sespie       prev = temp;
2114*c87b03e5Sespie       temp = XEXP (temp, 1);
2115*c87b03e5Sespie     }
2116*c87b03e5Sespie }
2117*c87b03e5Sespie 
2118*c87b03e5Sespie /* Nonzero if X contains any volatile instructions.  These are instructions
2119*c87b03e5Sespie    which may leave the machine in an unpredictable state, and thus no
2120*c87b03e5Sespie    instructions should be moved or combined across them.  This includes
2121*c87b03e5Sespie    only volatile asms and UNSPEC_VOLATILE instructions.  */
2122*c87b03e5Sespie 
2123*c87b03e5Sespie int
2124*c87b03e5Sespie volatile_insn_p (x)
2125*c87b03e5Sespie      rtx x;
2126*c87b03e5Sespie {
2127*c87b03e5Sespie   RTX_CODE code;
2128*c87b03e5Sespie 
2129*c87b03e5Sespie   code = GET_CODE (x);
2130*c87b03e5Sespie   switch (code)
2131*c87b03e5Sespie     {
2132*c87b03e5Sespie     case LABEL_REF:
2133*c87b03e5Sespie     case SYMBOL_REF:
2134*c87b03e5Sespie     case CONST_INT:
2135*c87b03e5Sespie     case CONST:
2136*c87b03e5Sespie     case CONST_DOUBLE:
2137*c87b03e5Sespie     case CONST_VECTOR:
2138*c87b03e5Sespie     case CC0:
2139*c87b03e5Sespie     case PC:
2140*c87b03e5Sespie     case REG:
2141*c87b03e5Sespie     case SCRATCH:
2142*c87b03e5Sespie     case CLOBBER:
2143*c87b03e5Sespie     case ADDR_VEC:
2144*c87b03e5Sespie     case ADDR_DIFF_VEC:
2145*c87b03e5Sespie     case CALL:
2146*c87b03e5Sespie     case MEM:
2147*c87b03e5Sespie       return 0;
2148*c87b03e5Sespie 
2149*c87b03e5Sespie     case UNSPEC_VOLATILE:
2150*c87b03e5Sespie  /* case TRAP_IF: This isn't clear yet.  */
2151*c87b03e5Sespie       return 1;
2152*c87b03e5Sespie 
2153*c87b03e5Sespie     case ASM_INPUT:
2154*c87b03e5Sespie     case ASM_OPERANDS:
2155*c87b03e5Sespie       if (MEM_VOLATILE_P (x))
2156*c87b03e5Sespie 	return 1;
2157*c87b03e5Sespie 
2158*c87b03e5Sespie     default:
2159*c87b03e5Sespie       break;
2160*c87b03e5Sespie     }
2161*c87b03e5Sespie 
2162*c87b03e5Sespie   /* Recursively scan the operands of this expression.  */
2163*c87b03e5Sespie 
2164*c87b03e5Sespie   {
2165*c87b03e5Sespie     const char *fmt = GET_RTX_FORMAT (code);
2166*c87b03e5Sespie     int i;
2167*c87b03e5Sespie 
2168*c87b03e5Sespie     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2169*c87b03e5Sespie       {
2170*c87b03e5Sespie 	if (fmt[i] == 'e')
2171*c87b03e5Sespie 	  {
2172*c87b03e5Sespie 	    if (volatile_insn_p (XEXP (x, i)))
2173*c87b03e5Sespie 	      return 1;
2174*c87b03e5Sespie 	  }
2175*c87b03e5Sespie 	else if (fmt[i] == 'E')
2176*c87b03e5Sespie 	  {
2177*c87b03e5Sespie 	    int j;
2178*c87b03e5Sespie 	    for (j = 0; j < XVECLEN (x, i); j++)
2179*c87b03e5Sespie 	      if (volatile_insn_p (XVECEXP (x, i, j)))
2180*c87b03e5Sespie 		return 1;
2181*c87b03e5Sespie 	  }
2182*c87b03e5Sespie       }
2183*c87b03e5Sespie   }
2184*c87b03e5Sespie   return 0;
2185*c87b03e5Sespie }
2186*c87b03e5Sespie 
2187*c87b03e5Sespie /* Nonzero if X contains any volatile memory references
2188*c87b03e5Sespie    UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
2189*c87b03e5Sespie 
2190*c87b03e5Sespie int
2191*c87b03e5Sespie volatile_refs_p (x)
2192*c87b03e5Sespie      rtx x;
2193*c87b03e5Sespie {
2194*c87b03e5Sespie   RTX_CODE code;
2195*c87b03e5Sespie 
2196*c87b03e5Sespie   code = GET_CODE (x);
2197*c87b03e5Sespie   switch (code)
2198*c87b03e5Sespie     {
2199*c87b03e5Sespie     case LABEL_REF:
2200*c87b03e5Sespie     case SYMBOL_REF:
2201*c87b03e5Sespie     case CONST_INT:
2202*c87b03e5Sespie     case CONST:
2203*c87b03e5Sespie     case CONST_DOUBLE:
2204*c87b03e5Sespie     case CONST_VECTOR:
2205*c87b03e5Sespie     case CC0:
2206*c87b03e5Sespie     case PC:
2207*c87b03e5Sespie     case REG:
2208*c87b03e5Sespie     case SCRATCH:
2209*c87b03e5Sespie     case CLOBBER:
2210*c87b03e5Sespie     case ADDR_VEC:
2211*c87b03e5Sespie     case ADDR_DIFF_VEC:
2212*c87b03e5Sespie       return 0;
2213*c87b03e5Sespie 
2214*c87b03e5Sespie     case UNSPEC_VOLATILE:
2215*c87b03e5Sespie       return 1;
2216*c87b03e5Sespie 
2217*c87b03e5Sespie     case MEM:
2218*c87b03e5Sespie     case ASM_INPUT:
2219*c87b03e5Sespie     case ASM_OPERANDS:
2220*c87b03e5Sespie       if (MEM_VOLATILE_P (x))
2221*c87b03e5Sespie 	return 1;
2222*c87b03e5Sespie 
2223*c87b03e5Sespie     default:
2224*c87b03e5Sespie       break;
2225*c87b03e5Sespie     }
2226*c87b03e5Sespie 
2227*c87b03e5Sespie   /* Recursively scan the operands of this expression.  */
2228*c87b03e5Sespie 
2229*c87b03e5Sespie   {
2230*c87b03e5Sespie     const char *fmt = GET_RTX_FORMAT (code);
2231*c87b03e5Sespie     int i;
2232*c87b03e5Sespie 
2233*c87b03e5Sespie     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2234*c87b03e5Sespie       {
2235*c87b03e5Sespie 	if (fmt[i] == 'e')
2236*c87b03e5Sespie 	  {
2237*c87b03e5Sespie 	    if (volatile_refs_p (XEXP (x, i)))
2238*c87b03e5Sespie 	      return 1;
2239*c87b03e5Sespie 	  }
2240*c87b03e5Sespie 	else if (fmt[i] == 'E')
2241*c87b03e5Sespie 	  {
2242*c87b03e5Sespie 	    int j;
2243*c87b03e5Sespie 	    for (j = 0; j < XVECLEN (x, i); j++)
2244*c87b03e5Sespie 	      if (volatile_refs_p (XVECEXP (x, i, j)))
2245*c87b03e5Sespie 		return 1;
2246*c87b03e5Sespie 	  }
2247*c87b03e5Sespie       }
2248*c87b03e5Sespie   }
2249*c87b03e5Sespie   return 0;
2250*c87b03e5Sespie }
2251*c87b03e5Sespie 
2252*c87b03e5Sespie /* Similar to above, except that it also rejects register pre- and post-
2253*c87b03e5Sespie    incrementing.  */
2254*c87b03e5Sespie 
2255*c87b03e5Sespie int
2256*c87b03e5Sespie side_effects_p (x)
2257*c87b03e5Sespie      rtx x;
2258*c87b03e5Sespie {
2259*c87b03e5Sespie   RTX_CODE code;
2260*c87b03e5Sespie 
2261*c87b03e5Sespie   code = GET_CODE (x);
2262*c87b03e5Sespie   switch (code)
2263*c87b03e5Sespie     {
2264*c87b03e5Sespie     case LABEL_REF:
2265*c87b03e5Sespie     case SYMBOL_REF:
2266*c87b03e5Sespie     case CONST_INT:
2267*c87b03e5Sespie     case CONST:
2268*c87b03e5Sespie     case CONST_DOUBLE:
2269*c87b03e5Sespie     case CONST_VECTOR:
2270*c87b03e5Sespie     case CC0:
2271*c87b03e5Sespie     case PC:
2272*c87b03e5Sespie     case REG:
2273*c87b03e5Sespie     case SCRATCH:
2274*c87b03e5Sespie     case ADDR_VEC:
2275*c87b03e5Sespie     case ADDR_DIFF_VEC:
2276*c87b03e5Sespie       return 0;
2277*c87b03e5Sespie 
2278*c87b03e5Sespie     case CLOBBER:
2279*c87b03e5Sespie       /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
2280*c87b03e5Sespie 	 when some combination can't be done.  If we see one, don't think
2281*c87b03e5Sespie 	 that we can simplify the expression.  */
2282*c87b03e5Sespie       return (GET_MODE (x) != VOIDmode);
2283*c87b03e5Sespie 
2284*c87b03e5Sespie     case PRE_INC:
2285*c87b03e5Sespie     case PRE_DEC:
2286*c87b03e5Sespie     case POST_INC:
2287*c87b03e5Sespie     case POST_DEC:
2288*c87b03e5Sespie     case PRE_MODIFY:
2289*c87b03e5Sespie     case POST_MODIFY:
2290*c87b03e5Sespie     case CALL:
2291*c87b03e5Sespie     case UNSPEC_VOLATILE:
2292*c87b03e5Sespie  /* case TRAP_IF: This isn't clear yet.  */
2293*c87b03e5Sespie       return 1;
2294*c87b03e5Sespie 
2295*c87b03e5Sespie     case MEM:
2296*c87b03e5Sespie     case ASM_INPUT:
2297*c87b03e5Sespie     case ASM_OPERANDS:
2298*c87b03e5Sespie       if (MEM_VOLATILE_P (x))
2299*c87b03e5Sespie 	return 1;
2300*c87b03e5Sespie 
2301*c87b03e5Sespie     default:
2302*c87b03e5Sespie       break;
2303*c87b03e5Sespie     }
2304*c87b03e5Sespie 
2305*c87b03e5Sespie   /* Recursively scan the operands of this expression.  */
2306*c87b03e5Sespie 
2307*c87b03e5Sespie   {
2308*c87b03e5Sespie     const char *fmt = GET_RTX_FORMAT (code);
2309*c87b03e5Sespie     int i;
2310*c87b03e5Sespie 
2311*c87b03e5Sespie     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2312*c87b03e5Sespie       {
2313*c87b03e5Sespie 	if (fmt[i] == 'e')
2314*c87b03e5Sespie 	  {
2315*c87b03e5Sespie 	    if (side_effects_p (XEXP (x, i)))
2316*c87b03e5Sespie 	      return 1;
2317*c87b03e5Sespie 	  }
2318*c87b03e5Sespie 	else if (fmt[i] == 'E')
2319*c87b03e5Sespie 	  {
2320*c87b03e5Sespie 	    int j;
2321*c87b03e5Sespie 	    for (j = 0; j < XVECLEN (x, i); j++)
2322*c87b03e5Sespie 	      if (side_effects_p (XVECEXP (x, i, j)))
2323*c87b03e5Sespie 		return 1;
2324*c87b03e5Sespie 	  }
2325*c87b03e5Sespie       }
2326*c87b03e5Sespie   }
2327*c87b03e5Sespie   return 0;
2328*c87b03e5Sespie }
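/* To illustrate how the three predicates above nest (a summary of the
   cases handled in this file, not an authoritative definition):

	volatile_insn_p (x)	volatile asms and UNSPEC_VOLATILE only
	volatile_refs_p (x)	the above, plus volatile MEMs
	side_effects_p (x)	the above, plus auto-modify addresses,
				CALLs and mode-carrying CLOBBERs

   For example, (mem/v:SI (reg 1)) satisfies the last two but not the
   first, and (post_inc:SI (reg 1)) satisfies only side_effects_p.  */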
2329*c87b03e5Sespie 
2330*c87b03e5Sespie /* Return nonzero if evaluating rtx X might cause a trap.  */
2331*c87b03e5Sespie 
2332*c87b03e5Sespie int
2333*c87b03e5Sespie may_trap_p (x)
2334*c87b03e5Sespie      rtx x;
2335*c87b03e5Sespie {
2336*c87b03e5Sespie   int i;
2337*c87b03e5Sespie   enum rtx_code code;
2338*c87b03e5Sespie   const char *fmt;
2339*c87b03e5Sespie 
2340*c87b03e5Sespie   if (x == 0)
2341*c87b03e5Sespie     return 0;
2342*c87b03e5Sespie   code = GET_CODE (x);
2343*c87b03e5Sespie   switch (code)
2344*c87b03e5Sespie     {
2345*c87b03e5Sespie       /* Handle these cases quickly.  */
2346*c87b03e5Sespie     case CONST_INT:
2347*c87b03e5Sespie     case CONST_DOUBLE:
2348*c87b03e5Sespie     case CONST_VECTOR:
2349*c87b03e5Sespie     case SYMBOL_REF:
2350*c87b03e5Sespie     case LABEL_REF:
2351*c87b03e5Sespie     case CONST:
2352*c87b03e5Sespie     case PC:
2353*c87b03e5Sespie     case CC0:
2354*c87b03e5Sespie     case REG:
2355*c87b03e5Sespie     case SCRATCH:
2356*c87b03e5Sespie       return 0;
2357*c87b03e5Sespie 
2358*c87b03e5Sespie     case ASM_INPUT:
2359*c87b03e5Sespie     case UNSPEC_VOLATILE:
2360*c87b03e5Sespie     case TRAP_IF:
2361*c87b03e5Sespie       return 1;
2362*c87b03e5Sespie 
2363*c87b03e5Sespie     case ASM_OPERANDS:
2364*c87b03e5Sespie       return MEM_VOLATILE_P (x);
2365*c87b03e5Sespie 
2366*c87b03e5Sespie       /* Memory ref can trap unless it's a static var or a stack slot.  */
2367*c87b03e5Sespie     case MEM:
2368*c87b03e5Sespie       if (MEM_NOTRAP_P (x))
2369*c87b03e5Sespie 	return 0;
2370*c87b03e5Sespie       return rtx_addr_can_trap_p (XEXP (x, 0));
2371*c87b03e5Sespie 
2372*c87b03e5Sespie       /* Division by a non-constant might trap.  */
2373*c87b03e5Sespie     case DIV:
2374*c87b03e5Sespie     case MOD:
2375*c87b03e5Sespie     case UDIV:
2376*c87b03e5Sespie     case UMOD:
2377*c87b03e5Sespie       if (HONOR_SNANS (GET_MODE (x)))
2378*c87b03e5Sespie 	return 1;
2379*c87b03e5Sespie       if (! CONSTANT_P (XEXP (x, 1))
2380*c87b03e5Sespie 	  || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2381*c87b03e5Sespie 	      && flag_trapping_math))
2382*c87b03e5Sespie 	return 1;
2383*c87b03e5Sespie       /* This was const0_rtx, but by not using that,
2384*c87b03e5Sespie 	 we can link this file into other programs.  */
2385*c87b03e5Sespie       if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2386*c87b03e5Sespie 	return 1;
2387*c87b03e5Sespie       break;
2388*c87b03e5Sespie 
2389*c87b03e5Sespie     case EXPR_LIST:
2390*c87b03e5Sespie       /* An EXPR_LIST is used to represent a function call.  This
2391*c87b03e5Sespie 	 certainly may trap.  */
2392*c87b03e5Sespie       return 1;
2393*c87b03e5Sespie 
2394*c87b03e5Sespie     case GE:
2395*c87b03e5Sespie     case GT:
2396*c87b03e5Sespie     case LE:
2397*c87b03e5Sespie     case LT:
2398*c87b03e5Sespie     case COMPARE:
2399*c87b03e5Sespie       /* Some floating point comparisons may trap.  */
2400*c87b03e5Sespie       if (!flag_trapping_math)
2401*c87b03e5Sespie 	break;
2402*c87b03e5Sespie       /* ??? There is no machine independent way to check for tests that trap
2403*c87b03e5Sespie 	 when COMPARE is used, though many targets do make this distinction.
2404*c87b03e5Sespie 	 For instance, sparc uses CCFPE for compares which generate exceptions
2405*c87b03e5Sespie 	 and CCFP for compares which do not generate exceptions.  */
2406*c87b03e5Sespie       if (HONOR_NANS (GET_MODE (x)))
2407*c87b03e5Sespie 	return 1;
2408*c87b03e5Sespie       /* But often the compare has some CC mode, so check operand
2409*c87b03e5Sespie 	 modes as well.  */
2410*c87b03e5Sespie       if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2411*c87b03e5Sespie 	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2412*c87b03e5Sespie 	return 1;
2413*c87b03e5Sespie       break;
2414*c87b03e5Sespie 
2415*c87b03e5Sespie     case EQ:
2416*c87b03e5Sespie     case NE:
2417*c87b03e5Sespie       if (HONOR_SNANS (GET_MODE (x)))
2418*c87b03e5Sespie 	return 1;
2419*c87b03e5Sespie       /* Often comparison is CC mode, so check operand modes.  */
2420*c87b03e5Sespie       if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2421*c87b03e5Sespie 	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2422*c87b03e5Sespie 	return 1;
2423*c87b03e5Sespie       break;
2424*c87b03e5Sespie 
2425*c87b03e5Sespie     case FIX:
2426*c87b03e5Sespie       /* Conversion of floating point might trap.  */
2427*c87b03e5Sespie       if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2428*c87b03e5Sespie 	return 1;
2429*c87b03e5Sespie       break;
2430*c87b03e5Sespie 
2431*c87b03e5Sespie     case NEG:
2432*c87b03e5Sespie     case ABS:
2433*c87b03e5Sespie       /* These operations don't trap even with floating point.  */
2434*c87b03e5Sespie       break;
2435*c87b03e5Sespie 
2436*c87b03e5Sespie     default:
2437*c87b03e5Sespie       /* Any floating arithmetic may trap.  */
2438*c87b03e5Sespie       if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2439*c87b03e5Sespie 	  && flag_trapping_math)
2440*c87b03e5Sespie 	return 1;
2441*c87b03e5Sespie     }
2442*c87b03e5Sespie 
2443*c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
2444*c87b03e5Sespie   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2445*c87b03e5Sespie     {
2446*c87b03e5Sespie       if (fmt[i] == 'e')
2447*c87b03e5Sespie 	{
2448*c87b03e5Sespie 	  if (may_trap_p (XEXP (x, i)))
2449*c87b03e5Sespie 	    return 1;
2450*c87b03e5Sespie 	}
2451*c87b03e5Sespie       else if (fmt[i] == 'E')
2452*c87b03e5Sespie 	{
2453*c87b03e5Sespie 	  int j;
2454*c87b03e5Sespie 	  for (j = 0; j < XVECLEN (x, i); j++)
2455*c87b03e5Sespie 	    if (may_trap_p (XVECEXP (x, i, j)))
2456*c87b03e5Sespie 	      return 1;
2457*c87b03e5Sespie 	}
2458*c87b03e5Sespie     }
2459*c87b03e5Sespie   return 0;
2460*c87b03e5Sespie }
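/* Illustration (hypothetical caller): a pass that hoists or speculates
   expressions would typically require both of

	! may_trap_p (x) && ! side_effects_p (x)

   before evaluating X on a path where it was not originally executed.  */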
2461*c87b03e5Sespie 
2462*c87b03e5Sespie /* Return nonzero if X contains a comparison that is not either EQ or NE,
2463*c87b03e5Sespie    i.e., an inequality.  */
2464*c87b03e5Sespie 
2465*c87b03e5Sespie int
2466*c87b03e5Sespie inequality_comparisons_p (x)
2467*c87b03e5Sespie      rtx x;
2468*c87b03e5Sespie {
2469*c87b03e5Sespie   const char *fmt;
2470*c87b03e5Sespie   int len, i;
2471*c87b03e5Sespie   enum rtx_code code = GET_CODE (x);
2472*c87b03e5Sespie 
2473*c87b03e5Sespie   switch (code)
2474*c87b03e5Sespie     {
2475*c87b03e5Sespie     case REG:
2476*c87b03e5Sespie     case SCRATCH:
2477*c87b03e5Sespie     case PC:
2478*c87b03e5Sespie     case CC0:
2479*c87b03e5Sespie     case CONST_INT:
2480*c87b03e5Sespie     case CONST_DOUBLE:
2481*c87b03e5Sespie     case CONST_VECTOR:
2482*c87b03e5Sespie     case CONST:
2483*c87b03e5Sespie     case LABEL_REF:
2484*c87b03e5Sespie     case SYMBOL_REF:
2485*c87b03e5Sespie       return 0;
2486*c87b03e5Sespie 
2487*c87b03e5Sespie     case LT:
2488*c87b03e5Sespie     case LTU:
2489*c87b03e5Sespie     case GT:
2490*c87b03e5Sespie     case GTU:
2491*c87b03e5Sespie     case LE:
2492*c87b03e5Sespie     case LEU:
2493*c87b03e5Sespie     case GE:
2494*c87b03e5Sespie     case GEU:
2495*c87b03e5Sespie       return 1;
2496*c87b03e5Sespie 
2497*c87b03e5Sespie     default:
2498*c87b03e5Sespie       break;
2499*c87b03e5Sespie     }
2500*c87b03e5Sespie 
2501*c87b03e5Sespie   len = GET_RTX_LENGTH (code);
2502*c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
2503*c87b03e5Sespie 
2504*c87b03e5Sespie   for (i = 0; i < len; i++)
2505*c87b03e5Sespie     {
2506*c87b03e5Sespie       if (fmt[i] == 'e')
2507*c87b03e5Sespie 	{
2508*c87b03e5Sespie 	  if (inequality_comparisons_p (XEXP (x, i)))
2509*c87b03e5Sespie 	    return 1;
2510*c87b03e5Sespie 	}
2511*c87b03e5Sespie       else if (fmt[i] == 'E')
2512*c87b03e5Sespie 	{
2513*c87b03e5Sespie 	  int j;
2514*c87b03e5Sespie 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2515*c87b03e5Sespie 	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
2516*c87b03e5Sespie 	      return 1;
2517*c87b03e5Sespie 	}
2518*c87b03e5Sespie     }
2519*c87b03e5Sespie 
2520*c87b03e5Sespie   return 0;
2521*c87b03e5Sespie }
2522*c87b03e5Sespie 
2523*c87b03e5Sespie /* Replace any occurrence of FROM in X with TO.  The function does
2524*c87b03e5Sespie    not descend into CONST_DOUBLE expressions when replacing.
2525*c87b03e5Sespie 
2526*c87b03e5Sespie    Note that copying is not done so X must not be shared unless all copies
2527*c87b03e5Sespie    are to be modified.  */
2528*c87b03e5Sespie 
2529*c87b03e5Sespie rtx
2530*c87b03e5Sespie replace_rtx (x, from, to)
2531*c87b03e5Sespie      rtx x, from, to;
2532*c87b03e5Sespie {
2533*c87b03e5Sespie   int i, j;
2534*c87b03e5Sespie   const char *fmt;
2535*c87b03e5Sespie 
2536*c87b03e5Sespie   /* The following prevents loops from occurring when we change a MEM
2537*c87b03e5Sespie      inside a CONST_DOUBLE into that same CONST_DOUBLE.  */
2538*c87b03e5Sespie   if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2539*c87b03e5Sespie     return x;
2540*c87b03e5Sespie 
2541*c87b03e5Sespie   if (x == from)
2542*c87b03e5Sespie     return to;
2543*c87b03e5Sespie 
2544*c87b03e5Sespie   /* Allow this function to make replacements in EXPR_LISTs.  */
2545*c87b03e5Sespie   if (x == 0)
2546*c87b03e5Sespie     return 0;
2547*c87b03e5Sespie 
2548*c87b03e5Sespie   if (GET_CODE (x) == SUBREG)
2549*c87b03e5Sespie     {
2550*c87b03e5Sespie       rtx new = replace_rtx (SUBREG_REG (x), from, to);
2551*c87b03e5Sespie 
2552*c87b03e5Sespie       if (GET_CODE (new) == CONST_INT)
2553*c87b03e5Sespie 	{
2554*c87b03e5Sespie 	  x = simplify_subreg (GET_MODE (x), new,
2555*c87b03e5Sespie 			       GET_MODE (SUBREG_REG (x)),
2556*c87b03e5Sespie 			       SUBREG_BYTE (x));
2557*c87b03e5Sespie 	  if (! x)
2558*c87b03e5Sespie 	    abort ();
2559*c87b03e5Sespie 	}
2560*c87b03e5Sespie       else
2561*c87b03e5Sespie 	SUBREG_REG (x) = new;
2562*c87b03e5Sespie 
2563*c87b03e5Sespie       return x;
2564*c87b03e5Sespie     }
2565*c87b03e5Sespie   else if (GET_CODE (x) == ZERO_EXTEND)
2566*c87b03e5Sespie     {
2567*c87b03e5Sespie       rtx new = replace_rtx (XEXP (x, 0), from, to);
2568*c87b03e5Sespie 
2569*c87b03e5Sespie       if (GET_CODE (new) == CONST_INT)
2570*c87b03e5Sespie 	{
2571*c87b03e5Sespie 	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2572*c87b03e5Sespie 					new, GET_MODE (XEXP (x, 0)));
2573*c87b03e5Sespie 	  if (! x)
2574*c87b03e5Sespie 	    abort ();
2575*c87b03e5Sespie 	}
2576*c87b03e5Sespie       else
2577*c87b03e5Sespie 	XEXP (x, 0) = new;
2578*c87b03e5Sespie 
2579*c87b03e5Sespie       return x;
2580*c87b03e5Sespie     }
2581*c87b03e5Sespie 
2582*c87b03e5Sespie   fmt = GET_RTX_FORMAT (GET_CODE (x));
2583*c87b03e5Sespie   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2584*c87b03e5Sespie     {
2585*c87b03e5Sespie       if (fmt[i] == 'e')
2586*c87b03e5Sespie 	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2587*c87b03e5Sespie       else if (fmt[i] == 'E')
2588*c87b03e5Sespie 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2589*c87b03e5Sespie 	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2590*c87b03e5Sespie     }
2591*c87b03e5Sespie 
2592*c87b03e5Sespie   return x;
2593*c87b03e5Sespie }
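/* Usage sketch (hypothetical): rewriting every occurrence of the pseudo
   OLD in an insn pattern to use NEW instead:

	rtx pat = copy_rtx (PATTERN (insn));
	pat = replace_rtx (pat, old, new);

   The copy is taken first because, as noted above, replace_rtx modifies X
   in place and therefore X must not be shared.  */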
2594*c87b03e5Sespie 
2595*c87b03e5Sespie /* Throughout the rtx X, replace many registers according to REG_MAP.
2596*c87b03e5Sespie    Return the replacement for X (which may be X with altered contents).
2597*c87b03e5Sespie    REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2598*c87b03e5Sespie    NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2599*c87b03e5Sespie 
2600*c87b03e5Sespie    We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
2601*c87b03e5Sespie    should not be mapped to pseudos or vice versa since validate_change
2602*c87b03e5Sespie    is not called.
2603*c87b03e5Sespie 
2604*c87b03e5Sespie    If REPLACE_DEST is 1, replacements are also done in destinations;
2605*c87b03e5Sespie    otherwise, only sources are replaced.  */
2606*c87b03e5Sespie 
2607*c87b03e5Sespie rtx
2608*c87b03e5Sespie replace_regs (x, reg_map, nregs, replace_dest)
2609*c87b03e5Sespie      rtx x;
2610*c87b03e5Sespie      rtx *reg_map;
2611*c87b03e5Sespie      unsigned int nregs;
2612*c87b03e5Sespie      int replace_dest;
2613*c87b03e5Sespie {
2614*c87b03e5Sespie   enum rtx_code code;
2615*c87b03e5Sespie   int i;
2616*c87b03e5Sespie   const char *fmt;
2617*c87b03e5Sespie 
2618*c87b03e5Sespie   if (x == 0)
2619*c87b03e5Sespie     return x;
2620*c87b03e5Sespie 
2621*c87b03e5Sespie   code = GET_CODE (x);
2622*c87b03e5Sespie   switch (code)
2623*c87b03e5Sespie     {
2624*c87b03e5Sespie     case SCRATCH:
2625*c87b03e5Sespie     case PC:
2626*c87b03e5Sespie     case CC0:
2627*c87b03e5Sespie     case CONST_INT:
2628*c87b03e5Sespie     case CONST_DOUBLE:
2629*c87b03e5Sespie     case CONST_VECTOR:
2630*c87b03e5Sespie     case CONST:
2631*c87b03e5Sespie     case SYMBOL_REF:
2632*c87b03e5Sespie     case LABEL_REF:
2633*c87b03e5Sespie       return x;
2634*c87b03e5Sespie 
2635*c87b03e5Sespie     case REG:
2636*c87b03e5Sespie       /* Verify that the register has an entry before trying to access it.  */
2637*c87b03e5Sespie       if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2638*c87b03e5Sespie 	{
2639*c87b03e5Sespie 	  /* SUBREGs can't be shared.  Always return a copy to ensure that if
2640*c87b03e5Sespie 	     this replacement occurs more than once then each instance will
2641*c87b03e5Sespie 	     get distinct rtx.  */
2642*c87b03e5Sespie 	  if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2643*c87b03e5Sespie 	    return copy_rtx (reg_map[REGNO (x)]);
2644*c87b03e5Sespie 	  return reg_map[REGNO (x)];
2645*c87b03e5Sespie 	}
2646*c87b03e5Sespie       return x;
2647*c87b03e5Sespie 
2648*c87b03e5Sespie     case SUBREG:
2649*c87b03e5Sespie       /* Prevent making nested SUBREGs.  */
2650*c87b03e5Sespie       if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2651*c87b03e5Sespie 	  && reg_map[REGNO (SUBREG_REG (x))] != 0
2652*c87b03e5Sespie 	  && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2653*c87b03e5Sespie 	{
2654*c87b03e5Sespie 	  rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2655*c87b03e5Sespie 	  return simplify_gen_subreg (GET_MODE (x), map_val,
2656*c87b03e5Sespie 				      GET_MODE (SUBREG_REG (x)),
2657*c87b03e5Sespie 				      SUBREG_BYTE (x));
2658*c87b03e5Sespie 	}
2659*c87b03e5Sespie       break;
2660*c87b03e5Sespie 
2661*c87b03e5Sespie     case SET:
2662*c87b03e5Sespie       if (replace_dest)
2663*c87b03e5Sespie 	SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2664*c87b03e5Sespie 
2665*c87b03e5Sespie       else if (GET_CODE (SET_DEST (x)) == MEM
2666*c87b03e5Sespie 	       || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2667*c87b03e5Sespie 	/* Even if we are not to replace destinations, replace a register
2668*c87b03e5Sespie 	   if it is CONTAINED in the destination (a MEM or
2669*c87b03e5Sespie 	   STRICT_LOW_PART).  */
2670*c87b03e5Sespie 	XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2671*c87b03e5Sespie 					       reg_map, nregs, 0);
2672*c87b03e5Sespie       else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2673*c87b03e5Sespie 	/* Similarly, for ZERO_EXTRACT we replace all operands.  */
2674*c87b03e5Sespie 	break;
2675*c87b03e5Sespie 
2676*c87b03e5Sespie       SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2677*c87b03e5Sespie       return x;
2678*c87b03e5Sespie 
2679*c87b03e5Sespie     default:
2680*c87b03e5Sespie       break;
2681*c87b03e5Sespie     }
2682*c87b03e5Sespie 
2683*c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
2684*c87b03e5Sespie   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2685*c87b03e5Sespie     {
2686*c87b03e5Sespie       if (fmt[i] == 'e')
2687*c87b03e5Sespie 	XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2688*c87b03e5Sespie       else if (fmt[i] == 'E')
2689*c87b03e5Sespie 	{
2690*c87b03e5Sespie 	  int j;
2691*c87b03e5Sespie 	  for (j = 0; j < XVECLEN (x, i); j++)
2692*c87b03e5Sespie 	    XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2693*c87b03e5Sespie 					      nregs, replace_dest);
2694*c87b03e5Sespie 	}
2695*c87b03e5Sespie     }
2696*c87b03e5Sespie   return x;
2697*c87b03e5Sespie }
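
#if 0  /* Illustrative sketch, not part of the original file: a hypothetical
	  wrapper that maps the single pseudo OLD onto NEW (another REG or
	  SUBREG) throughout the body of INSN, destinations included.
	  Entries left null by xcalloc mean "do not replace".  */
static void
example_replace_one_pseudo (insn, old, new)
     rtx insn, old, new;
{
  unsigned int nregs = REGNO (old) + 1;
  rtx *reg_map = (rtx *) xcalloc (nregs, sizeof (rtx));

  reg_map[REGNO (old)] = new;
  PATTERN (insn) = replace_regs (PATTERN (insn), reg_map, nregs, 1);
  free (reg_map);
}
#endif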
2698*c87b03e5Sespie 
2699*c87b03e5Sespie /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2700*c87b03e5Sespie    constant that is not in the constant pool and not in the condition
2701*c87b03e5Sespie    of an IF_THEN_ELSE.  */
2702*c87b03e5Sespie 
2703*c87b03e5Sespie static int
2704*c87b03e5Sespie computed_jump_p_1 (x)
2705*c87b03e5Sespie      rtx x;
2706*c87b03e5Sespie {
2707*c87b03e5Sespie   enum rtx_code code = GET_CODE (x);
2708*c87b03e5Sespie   int i, j;
2709*c87b03e5Sespie   const char *fmt;
2710*c87b03e5Sespie 
2711*c87b03e5Sespie   switch (code)
2712*c87b03e5Sespie     {
2713*c87b03e5Sespie     case LABEL_REF:
2714*c87b03e5Sespie     case PC:
2715*c87b03e5Sespie       return 0;
2716*c87b03e5Sespie 
2717*c87b03e5Sespie     case CONST:
2718*c87b03e5Sespie     case CONST_INT:
2719*c87b03e5Sespie     case CONST_DOUBLE:
2720*c87b03e5Sespie     case CONST_VECTOR:
2721*c87b03e5Sespie     case SYMBOL_REF:
2722*c87b03e5Sespie     case REG:
2723*c87b03e5Sespie       return 1;
2724*c87b03e5Sespie 
2725*c87b03e5Sespie     case MEM:
2726*c87b03e5Sespie       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2727*c87b03e5Sespie 		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2728*c87b03e5Sespie 
2729*c87b03e5Sespie     case IF_THEN_ELSE:
2730*c87b03e5Sespie       return (computed_jump_p_1 (XEXP (x, 1))
2731*c87b03e5Sespie 	      || computed_jump_p_1 (XEXP (x, 2)));
2732*c87b03e5Sespie 
2733*c87b03e5Sespie     default:
2734*c87b03e5Sespie       break;
2735*c87b03e5Sespie     }
2736*c87b03e5Sespie 
2737*c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
2738*c87b03e5Sespie   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2739*c87b03e5Sespie     {
2740*c87b03e5Sespie       if (fmt[i] == 'e'
2741*c87b03e5Sespie 	  && computed_jump_p_1 (XEXP (x, i)))
2742*c87b03e5Sespie 	return 1;
2743*c87b03e5Sespie 
2744*c87b03e5Sespie       else if (fmt[i] == 'E')
2745*c87b03e5Sespie 	for (j = 0; j < XVECLEN (x, i); j++)
2746*c87b03e5Sespie 	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
2747*c87b03e5Sespie 	    return 1;
2748*c87b03e5Sespie     }
2749*c87b03e5Sespie 
2750*c87b03e5Sespie   return 0;
2751*c87b03e5Sespie }
2752*c87b03e5Sespie 
2753*c87b03e5Sespie /* Return nonzero if INSN is an indirect jump (aka computed jump).
2754*c87b03e5Sespie 
2755*c87b03e5Sespie    Tablejumps and casesi insns are not considered indirect jumps;
2756*c87b03e5Sespie    we can recognize them by a (use (label_ref)).  */
2757*c87b03e5Sespie 
2758*c87b03e5Sespie int
2759*c87b03e5Sespie computed_jump_p (insn)
2760*c87b03e5Sespie      rtx insn;
2761*c87b03e5Sespie {
2762*c87b03e5Sespie   int i;
2763*c87b03e5Sespie   if (GET_CODE (insn) == JUMP_INSN)
2764*c87b03e5Sespie     {
2765*c87b03e5Sespie       rtx pat = PATTERN (insn);
2766*c87b03e5Sespie 
2767*c87b03e5Sespie       if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2768*c87b03e5Sespie 	return 0;
2769*c87b03e5Sespie       else if (GET_CODE (pat) == PARALLEL)
2770*c87b03e5Sespie 	{
2771*c87b03e5Sespie 	  int len = XVECLEN (pat, 0);
2772*c87b03e5Sespie 	  int has_use_labelref = 0;
2773*c87b03e5Sespie 
2774*c87b03e5Sespie 	  for (i = len - 1; i >= 0; i--)
2775*c87b03e5Sespie 	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2776*c87b03e5Sespie 		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2777*c87b03e5Sespie 		    == LABEL_REF))
2778*c87b03e5Sespie 	      has_use_labelref = 1;
2779*c87b03e5Sespie 
2780*c87b03e5Sespie 	  if (! has_use_labelref)
2781*c87b03e5Sespie 	    for (i = len - 1; i >= 0; i--)
2782*c87b03e5Sespie 	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2783*c87b03e5Sespie 		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2784*c87b03e5Sespie 		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2785*c87b03e5Sespie 		return 1;
2786*c87b03e5Sespie 	}
2787*c87b03e5Sespie       else if (GET_CODE (pat) == SET
2788*c87b03e5Sespie 	       && SET_DEST (pat) == pc_rtx
2789*c87b03e5Sespie 	       && computed_jump_p_1 (SET_SRC (pat)))
2790*c87b03e5Sespie 	return 1;
2791*c87b03e5Sespie     }
2792*c87b03e5Sespie   return 0;
2793*c87b03e5Sespie }
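
#if 0  /* Illustrative sketch, not part of the original file: a hypothetical
	  helper that counts the computed jumps in the insn stream from
	  FROM up to and including TO.  */
static int
example_count_computed_jumps (from, to)
     rtx from, to;
{
  rtx insn;
  int count = 0;

  for (insn = from; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn) && computed_jump_p (insn))
	count++;
      if (insn == to)
	break;
    }
  return count;
}
#endif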
2794*c87b03e5Sespie 
2795*c87b03e5Sespie /* Traverse X via depth-first search, calling F for each
2796*c87b03e5Sespie    sub-expression (including X itself).  F is also passed the DATA.
2797*c87b03e5Sespie    If F returns -1, do not traverse sub-expressions, but continue
2798*c87b03e5Sespie    traversing the rest of the tree.  If F ever returns any other
2799*c87b03e5Sespie    nonzero value, stop the traversal, and return the value returned
2800*c87b03e5Sespie    by F.  Otherwise, return 0.  This function does not traverse inside
2801*c87b03e5Sespie    tree structure that contains RTX_EXPRs, or into sub-expressions
2802*c87b03e5Sespie    whose format code is `0' since it is not known whether or not those
2803*c87b03e5Sespie    codes are actually RTL.
2804*c87b03e5Sespie 
2805*c87b03e5Sespie    This routine is very general, and could (should?) be used to
2806*c87b03e5Sespie    implement many of the other routines in this file.  */
2807*c87b03e5Sespie 
2808*c87b03e5Sespie int
2809*c87b03e5Sespie for_each_rtx (x, f, data)
2810*c87b03e5Sespie      rtx *x;
2811*c87b03e5Sespie      rtx_function f;
2812*c87b03e5Sespie      void *data;
2813*c87b03e5Sespie {
2814*c87b03e5Sespie   int result;
2815*c87b03e5Sespie   int length;
2816*c87b03e5Sespie   const char *format;
2817*c87b03e5Sespie   int i;
2818*c87b03e5Sespie 
2819*c87b03e5Sespie   /* Call F on X.  */
2820*c87b03e5Sespie   result = (*f) (x, data);
2821*c87b03e5Sespie   if (result == -1)
2822*c87b03e5Sespie     /* Do not traverse sub-expressions.  */
2823*c87b03e5Sespie     return 0;
2824*c87b03e5Sespie   else if (result != 0)
2825*c87b03e5Sespie     /* Stop the traversal.  */
2826*c87b03e5Sespie     return result;
2827*c87b03e5Sespie 
2828*c87b03e5Sespie   if (*x == NULL_RTX)
2829*c87b03e5Sespie     /* There are no sub-expressions.  */
2830*c87b03e5Sespie     return 0;
2831*c87b03e5Sespie 
2832*c87b03e5Sespie   length = GET_RTX_LENGTH (GET_CODE (*x));
2833*c87b03e5Sespie   format = GET_RTX_FORMAT (GET_CODE (*x));
2834*c87b03e5Sespie 
2835*c87b03e5Sespie   for (i = 0; i < length; ++i)
2836*c87b03e5Sespie     {
2837*c87b03e5Sespie       switch (format[i])
2838*c87b03e5Sespie 	{
2839*c87b03e5Sespie 	case 'e':
2840*c87b03e5Sespie 	  result = for_each_rtx (&XEXP (*x, i), f, data);
2841*c87b03e5Sespie 	  if (result != 0)
2842*c87b03e5Sespie 	    return result;
2843*c87b03e5Sespie 	  break;
2844*c87b03e5Sespie 
2845*c87b03e5Sespie 	case 'V':
2846*c87b03e5Sespie 	case 'E':
2847*c87b03e5Sespie 	  if (XVEC (*x, i) != 0)
2848*c87b03e5Sespie 	    {
2849*c87b03e5Sespie 	      int j;
2850*c87b03e5Sespie 	      for (j = 0; j < XVECLEN (*x, i); ++j)
2851*c87b03e5Sespie 		{
2852*c87b03e5Sespie 		  result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2853*c87b03e5Sespie 		  if (result != 0)
2854*c87b03e5Sespie 		    return result;
2855*c87b03e5Sespie 		}
2856*c87b03e5Sespie 	    }
2857*c87b03e5Sespie 	  break;
2858*c87b03e5Sespie 
2859*c87b03e5Sespie 	default:
2860*c87b03e5Sespie 	  /* Nothing to do.  */
2861*c87b03e5Sespie 	  break;
2862*c87b03e5Sespie 	}
2863*c87b03e5Sespie 
2864*c87b03e5Sespie     }
2865*c87b03e5Sespie 
2866*c87b03e5Sespie   return 0;
2867*c87b03e5Sespie }
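
#if 0  /* Illustrative sketch, not part of the original file: a minimal
	  for_each_rtx callback pair that counts MEM sub-expressions.
	  Returning 0 continues the walk, -1 would skip the sub-expressions
	  of *X, and any other value would stop the walk and be returned
	  by for_each_rtx.  */
static int
example_count_mems_1 (x, data)
     rtx *x;
     void *data;
{
  if (*x != NULL_RTX && GET_CODE (*x) == MEM)
    ++*(int *) data;
  return 0;
}

static int
example_count_mems (x)
     rtx x;
{
  int count = 0;

  for_each_rtx (&x, example_count_mems_1, &count);
  return count;
}
#endif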
2868*c87b03e5Sespie 
2869*c87b03e5Sespie /* Searches X for any reference to REGNO, returning the rtx of the
2870*c87b03e5Sespie    reference found if any.  Otherwise, returns NULL_RTX.  */
2871*c87b03e5Sespie 
2872*c87b03e5Sespie rtx
2873*c87b03e5Sespie regno_use_in (regno, x)
2874*c87b03e5Sespie      unsigned int regno;
2875*c87b03e5Sespie      rtx x;
2876*c87b03e5Sespie {
2877*c87b03e5Sespie   const char *fmt;
2878*c87b03e5Sespie   int i, j;
2879*c87b03e5Sespie   rtx tem;
2880*c87b03e5Sespie 
2881*c87b03e5Sespie   if (GET_CODE (x) == REG && REGNO (x) == regno)
2882*c87b03e5Sespie     return x;
2883*c87b03e5Sespie 
2884*c87b03e5Sespie   fmt = GET_RTX_FORMAT (GET_CODE (x));
2885*c87b03e5Sespie   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2886*c87b03e5Sespie     {
2887*c87b03e5Sespie       if (fmt[i] == 'e')
2888*c87b03e5Sespie 	{
2889*c87b03e5Sespie 	  if ((tem = regno_use_in (regno, XEXP (x, i))))
2890*c87b03e5Sespie 	    return tem;
2891*c87b03e5Sespie 	}
2892*c87b03e5Sespie       else if (fmt[i] == 'E')
2893*c87b03e5Sespie 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2894*c87b03e5Sespie 	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2895*c87b03e5Sespie 	    return tem;
2896*c87b03e5Sespie     }
2897*c87b03e5Sespie 
2898*c87b03e5Sespie   return NULL_RTX;
2899*c87b03e5Sespie }
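
#if 0  /* Illustrative sketch, not part of the original file: a hypothetical
	  predicate built on regno_use_in.  */
static int
example_mentions_frame_pointer (insn)
     rtx insn;
{
  return regno_use_in (FRAME_POINTER_REGNUM, PATTERN (insn)) != NULL_RTX;
}
#endif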
2900*c87b03e5Sespie 
2901*c87b03e5Sespie /* Return a value indicating whether OP, an operand of a commutative
2902*c87b03e5Sespie    operation, is preferred as the first or second operand.  The higher
2903*c87b03e5Sespie    the value, the stronger the preference for being the first operand.
2904*c87b03e5Sespie    Negative values therefore indicate a preference for the second
2905*c87b03e5Sespie    operand and positive values for the first.  */
2906*c87b03e5Sespie 
2907*c87b03e5Sespie int
2908*c87b03e5Sespie commutative_operand_precedence (op)
2909*c87b03e5Sespie      rtx op;
2910*c87b03e5Sespie {
2911*c87b03e5Sespie   /* Constants always come second.  Prefer "nice" constants.  */
2912*c87b03e5Sespie   if (GET_CODE (op) == CONST_INT)
2913*c87b03e5Sespie     return -5;
2914*c87b03e5Sespie   if (GET_CODE (op) == CONST_DOUBLE)
2915*c87b03e5Sespie     return -4;
2916*c87b03e5Sespie   if (CONSTANT_P (op))
2917*c87b03e5Sespie     return -3;
2918*c87b03e5Sespie 
2919*c87b03e5Sespie   /* SUBREGs of objects should come second.  */
2920*c87b03e5Sespie   if (GET_CODE (op) == SUBREG
2921*c87b03e5Sespie       && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2922*c87b03e5Sespie     return -2;
2923*c87b03e5Sespie 
2924*c87b03e5Sespie   /* If only one operand is a `neg', `not',
2925*c87b03e5Sespie     `mult', `plus', or `minus' expression, it will be the first
2926*c87b03e5Sespie     operand.  */
2927*c87b03e5Sespie   if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2928*c87b03e5Sespie       || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2929*c87b03e5Sespie       || GET_CODE (op) == MINUS)
2930*c87b03e5Sespie     return 2;
2931*c87b03e5Sespie 
2932*c87b03e5Sespie   /* Complex expressions should come first, so decrease the priority
2933*c87b03e5Sespie      of objects.  */
2934*c87b03e5Sespie   if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2935*c87b03e5Sespie     return -1;
2936*c87b03e5Sespie   return 0;
2937*c87b03e5Sespie }
2938*c87b03e5Sespie 
2939*c87b03e5Sespie /* Return 1 iff it is necessary to swap operands of commutative operation
2940*c87b03e5Sespie    in order to canonicalize expression.  */
2941*c87b03e5Sespie 
2942*c87b03e5Sespie int
2943*c87b03e5Sespie swap_commutative_operands_p (x, y)
2944*c87b03e5Sespie      rtx x, y;
2945*c87b03e5Sespie {
2946*c87b03e5Sespie   return (commutative_operand_precedence (x)
2947*c87b03e5Sespie 	  < commutative_operand_precedence (y));
2948*c87b03e5Sespie }
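
#if 0  /* Illustrative sketch, not part of the original file: a hypothetical
	  helper that puts the operands of a commutative binary rtx X into
	  canonical order, so that constants and other low-precedence
	  operands end up second.  */
static void
example_canonicalize_commutative (x)
     rtx x;
{
  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      rtx tem = XEXP (x, 0);
      XEXP (x, 0) = XEXP (x, 1);
      XEXP (x, 1) = tem;
    }
}
#endif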
2949*c87b03e5Sespie 
2950*c87b03e5Sespie /* Return 1 if X is an autoincrement side effect and the register is
2951*c87b03e5Sespie    not the stack pointer.  */
2952*c87b03e5Sespie int
2953*c87b03e5Sespie auto_inc_p (x)
2954*c87b03e5Sespie      rtx x;
2955*c87b03e5Sespie {
2956*c87b03e5Sespie   switch (GET_CODE (x))
2957*c87b03e5Sespie     {
2958*c87b03e5Sespie     case PRE_INC:
2959*c87b03e5Sespie     case POST_INC:
2960*c87b03e5Sespie     case PRE_DEC:
2961*c87b03e5Sespie     case POST_DEC:
2962*c87b03e5Sespie     case PRE_MODIFY:
2963*c87b03e5Sespie     case POST_MODIFY:
2964*c87b03e5Sespie       /* There are no REG_INC notes for SP.  */
2965*c87b03e5Sespie       if (XEXP (x, 0) != stack_pointer_rtx)
2966*c87b03e5Sespie 	return 1;
2967*c87b03e5Sespie     default:
2968*c87b03e5Sespie       break;
2969*c87b03e5Sespie     }
2970*c87b03e5Sespie   return 0;
2971*c87b03e5Sespie }
2972*c87b03e5Sespie 
2973*c87b03e5Sespie /* Return 1 if the sequence of instructions beginning with FROM and up
2974*c87b03e5Sespie    to and including TO is safe to move.  If NEW_TO is non-NULL, and
2975*c87b03e5Sespie    the sequence is not already safe to move, but can be easily
2976*c87b03e5Sespie    extended to a sequence which is safe, then NEW_TO will point to the
2977*c87b03e5Sespie    end of the extended sequence.
2978*c87b03e5Sespie 
2979*c87b03e5Sespie    For now, this function only checks that the region contains whole
2980*c87b03e5Sespie    exception regions, but it could be extended to check additional
2981*c87b03e5Sespie    conditions as well.  */
2982*c87b03e5Sespie 
2983*c87b03e5Sespie int
2984*c87b03e5Sespie insns_safe_to_move_p (from, to, new_to)
2985*c87b03e5Sespie      rtx from;
2986*c87b03e5Sespie      rtx to;
2987*c87b03e5Sespie      rtx *new_to;
2988*c87b03e5Sespie {
2989*c87b03e5Sespie   int eh_region_count = 0;
2990*c87b03e5Sespie   int past_to_p = 0;
2991*c87b03e5Sespie   rtx r = from;
2992*c87b03e5Sespie 
2993*c87b03e5Sespie   /* By default, assume the end of the region will be what was
2994*c87b03e5Sespie      suggested.  */
2995*c87b03e5Sespie   if (new_to)
2996*c87b03e5Sespie     *new_to = to;
2997*c87b03e5Sespie 
2998*c87b03e5Sespie   while (r)
2999*c87b03e5Sespie     {
3000*c87b03e5Sespie       if (GET_CODE (r) == NOTE)
3001*c87b03e5Sespie 	{
3002*c87b03e5Sespie 	  switch (NOTE_LINE_NUMBER (r))
3003*c87b03e5Sespie 	    {
3004*c87b03e5Sespie 	    case NOTE_INSN_EH_REGION_BEG:
3005*c87b03e5Sespie 	      ++eh_region_count;
3006*c87b03e5Sespie 	      break;
3007*c87b03e5Sespie 
3008*c87b03e5Sespie 	    case NOTE_INSN_EH_REGION_END:
3009*c87b03e5Sespie 	      if (eh_region_count == 0)
3010*c87b03e5Sespie 		/* This sequence of instructions contains the end of
3011*c87b03e5Sespie 		   an exception region, but not the beginning.  Moving
3012*c87b03e5Sespie 		   it will cause chaos.  */
3013*c87b03e5Sespie 		return 0;
3014*c87b03e5Sespie 
3015*c87b03e5Sespie 	      --eh_region_count;
3016*c87b03e5Sespie 	      break;
3017*c87b03e5Sespie 
3018*c87b03e5Sespie 	    default:
3019*c87b03e5Sespie 	      break;
3020*c87b03e5Sespie 	    }
3021*c87b03e5Sespie 	}
3022*c87b03e5Sespie       else if (past_to_p)
3023*c87b03e5Sespie 	/* If we've passed TO, and we see a non-note instruction, we
3024*c87b03e5Sespie 	   can't extend the sequence to a movable sequence.  */
3025*c87b03e5Sespie 	return 0;
3026*c87b03e5Sespie 
3027*c87b03e5Sespie       if (r == to)
3028*c87b03e5Sespie 	{
3029*c87b03e5Sespie 	  if (!new_to)
3030*c87b03e5Sespie 	    /* It's OK to move the sequence if there were matched sets of
3031*c87b03e5Sespie 	       exception region notes.  */
3032*c87b03e5Sespie 	    return eh_region_count == 0;
3033*c87b03e5Sespie 
3034*c87b03e5Sespie 	  past_to_p = 1;
3035*c87b03e5Sespie 	}
3036*c87b03e5Sespie 
3037*c87b03e5Sespie       /* If we have passed TO and the exception region notes have now
3038*c87b03e5Sespie 	 matched up, the sequence extended to this point is movable.  */
3039*c87b03e5Sespie       if (past_to_p && eh_region_count == 0)
3040*c87b03e5Sespie 	{
3041*c87b03e5Sespie 	  *new_to = r;
3042*c87b03e5Sespie 	  return 1;
3043*c87b03e5Sespie 	}
3044*c87b03e5Sespie 
3045*c87b03e5Sespie       /* Go to the next instruction.  */
3046*c87b03e5Sespie       r = NEXT_INSN (r);
3047*c87b03e5Sespie     }
3048*c87b03e5Sespie 
3049*c87b03e5Sespie   return 0;
3050*c87b03e5Sespie }
3051*c87b03e5Sespie 
3052*c87b03e5Sespie /* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
3053*c87b03e5Sespie int
3054*c87b03e5Sespie loc_mentioned_in_p (loc, in)
3055*c87b03e5Sespie      rtx *loc, in;
3056*c87b03e5Sespie {
3057*c87b03e5Sespie   enum rtx_code code = GET_CODE (in);
3058*c87b03e5Sespie   const char *fmt = GET_RTX_FORMAT (code);
3059*c87b03e5Sespie   int i, j;
3060*c87b03e5Sespie 
3061*c87b03e5Sespie   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3062*c87b03e5Sespie     {
3063*c87b03e5Sespie       if (loc == &in->fld[i].rtx)
3064*c87b03e5Sespie 	return 1;
3065*c87b03e5Sespie       if (fmt[i] == 'e')
3066*c87b03e5Sespie 	{
3067*c87b03e5Sespie 	  if (loc_mentioned_in_p (loc, XEXP (in, i)))
3068*c87b03e5Sespie 	    return 1;
3069*c87b03e5Sespie 	}
3070*c87b03e5Sespie       else if (fmt[i] == 'E')
3071*c87b03e5Sespie 	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3072*c87b03e5Sespie 	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3073*c87b03e5Sespie 	    return 1;
3074*c87b03e5Sespie     }
3075*c87b03e5Sespie   return 0;
3076*c87b03e5Sespie }
3077*c87b03e5Sespie 
3078*c87b03e5Sespie /* Given a subreg X, return the bit offset where the subreg begins
3079*c87b03e5Sespie    (counting from the least significant bit of the reg).  */
3080*c87b03e5Sespie 
3081*c87b03e5Sespie unsigned int
3082*c87b03e5Sespie subreg_lsb (x)
3083*c87b03e5Sespie      rtx x;
3084*c87b03e5Sespie {
3085*c87b03e5Sespie   enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3086*c87b03e5Sespie   enum machine_mode mode = GET_MODE (x);
3087*c87b03e5Sespie   unsigned int bitpos;
3088*c87b03e5Sespie   unsigned int byte;
3089*c87b03e5Sespie   unsigned int word;
3090*c87b03e5Sespie 
3091*c87b03e5Sespie   /* A paradoxical subreg begins at bit position 0.  */
3092*c87b03e5Sespie   if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3093*c87b03e5Sespie     return 0;
3094*c87b03e5Sespie 
3095*c87b03e5Sespie   if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3096*c87b03e5Sespie     /* If the subreg crosses a word boundary ensure that
3097*c87b03e5Sespie        it also begins and ends on a word boundary.  */
3098*c87b03e5Sespie     if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3099*c87b03e5Sespie 	 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3100*c87b03e5Sespie 	&& (SUBREG_BYTE (x) % UNITS_PER_WORD
3101*c87b03e5Sespie 	    || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3102*c87b03e5Sespie 	abort ();
3103*c87b03e5Sespie 
3104*c87b03e5Sespie   if (WORDS_BIG_ENDIAN)
3105*c87b03e5Sespie     word = (GET_MODE_SIZE (inner_mode)
3106*c87b03e5Sespie 	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3107*c87b03e5Sespie   else
3108*c87b03e5Sespie     word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3109*c87b03e5Sespie   bitpos = word * BITS_PER_WORD;
3110*c87b03e5Sespie 
3111*c87b03e5Sespie   if (BYTES_BIG_ENDIAN)
3112*c87b03e5Sespie     byte = (GET_MODE_SIZE (inner_mode)
3113*c87b03e5Sespie 	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3114*c87b03e5Sespie   else
3115*c87b03e5Sespie     byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3116*c87b03e5Sespie   bitpos += byte * BITS_PER_UNIT;
3117*c87b03e5Sespie 
3118*c87b03e5Sespie   return bitpos;
3119*c87b03e5Sespie }
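
/* Worked example (illustrative note, not part of the original file):
   for (subreg:QI (reg:SI R) 3) on a target with 4-byte words, a
   little-endian layout yields word 0 and byte 3, so the subreg starts at
   bit 24; a fully big-endian layout maps the same byte offset to the
   least significant byte, so subreg_lsb returns 0.  */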
3120*c87b03e5Sespie 
3121*c87b03e5Sespie /* This function returns the regno offset of a subreg expression.
3122*c87b03e5Sespie    xregno - A regno of an inner hard subreg_reg (or what will become one).
3123*c87b03e5Sespie    xmode  - The mode of xregno.
3124*c87b03e5Sespie    offset - The byte offset.
3125*c87b03e5Sespie    ymode  - The mode of a top level SUBREG (or what may become one).
3126*c87b03e5Sespie    RETURN - The regno offset which would be used.  */
3127*c87b03e5Sespie unsigned int
3128*c87b03e5Sespie subreg_regno_offset (xregno, xmode, offset, ymode)
3129*c87b03e5Sespie      unsigned int xregno;
3130*c87b03e5Sespie      enum machine_mode xmode;
3131*c87b03e5Sespie      unsigned int offset;
3132*c87b03e5Sespie      enum machine_mode ymode;
3133*c87b03e5Sespie {
3134*c87b03e5Sespie   int nregs_xmode, nregs_ymode;
3135*c87b03e5Sespie   int mode_multiple, nregs_multiple;
3136*c87b03e5Sespie   int y_offset;
3137*c87b03e5Sespie 
3138*c87b03e5Sespie   if (xregno >= FIRST_PSEUDO_REGISTER)
3139*c87b03e5Sespie     abort ();
3140*c87b03e5Sespie 
3141*c87b03e5Sespie   nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3142*c87b03e5Sespie   nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3143*c87b03e5Sespie 
3144*c87b03e5Sespie   /* If this is a big endian paradoxical subreg, which uses more actual
3145*c87b03e5Sespie      hard registers than the original register, we must return a negative
3146*c87b03e5Sespie      offset so that we find the proper highpart of the register.  */
3147*c87b03e5Sespie   if (offset == 0
3148*c87b03e5Sespie       && nregs_ymode > nregs_xmode
3149*c87b03e5Sespie       && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3150*c87b03e5Sespie 	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3151*c87b03e5Sespie     return nregs_xmode - nregs_ymode;
3152*c87b03e5Sespie 
3153*c87b03e5Sespie   if (offset == 0 || nregs_xmode == nregs_ymode)
3154*c87b03e5Sespie     return 0;
3155*c87b03e5Sespie 
3156*c87b03e5Sespie   /* The size of YMODE must not be greater than the size of XMODE.  */
3157*c87b03e5Sespie   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3158*c87b03e5Sespie   if (mode_multiple == 0)
3159*c87b03e5Sespie     abort ();
3160*c87b03e5Sespie 
3161*c87b03e5Sespie   y_offset = offset / GET_MODE_SIZE (ymode);
3162*c87b03e5Sespie   nregs_multiple =  nregs_xmode / nregs_ymode;
3163*c87b03e5Sespie   return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3164*c87b03e5Sespie }
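
/* Worked example (illustrative note, not part of the original file):
   with 4-byte hard registers, (subreg:SI (reg:DI R) 4) refers to the
   second register of the pair: nregs_xmode = 2, nregs_ymode = 1,
   mode_multiple = 2, y_offset = 1 and nregs_multiple = 2, so the
   regno offset returned is 1.  */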
3165*c87b03e5Sespie 
3166*c87b03e5Sespie /* This function returns true when the offset is representable via
3167*c87b03e5Sespie    subreg_offset in the given regno.
3168*c87b03e5Sespie    xregno - A regno of an inner hard subreg_reg (or what will become one).
3169*c87b03e5Sespie    xmode  - The mode of xregno.
3170*c87b03e5Sespie    offset - The byte offset.
3171*c87b03e5Sespie    ymode  - The mode of a top level SUBREG (or what may become one).
3172*c87b03e5Sespie    RETURN - Whether the offset is representable.  */
3173*c87b03e5Sespie bool
3174*c87b03e5Sespie subreg_offset_representable_p (xregno, xmode, offset, ymode)
3175*c87b03e5Sespie      unsigned int xregno;
3176*c87b03e5Sespie      enum machine_mode xmode;
3177*c87b03e5Sespie      unsigned int offset;
3178*c87b03e5Sespie      enum machine_mode ymode;
3179*c87b03e5Sespie {
3180*c87b03e5Sespie   int nregs_xmode, nregs_ymode;
3181*c87b03e5Sespie   int mode_multiple, nregs_multiple;
3182*c87b03e5Sespie   int y_offset;
3183*c87b03e5Sespie 
3184*c87b03e5Sespie   if (xregno >= FIRST_PSEUDO_REGISTER)
3185*c87b03e5Sespie     abort ();
3186*c87b03e5Sespie 
3187*c87b03e5Sespie   nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3188*c87b03e5Sespie   nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3189*c87b03e5Sespie 
3190*c87b03e5Sespie   /* Paradoxical subregs are always valid.  */
3191*c87b03e5Sespie   if (offset == 0
3192*c87b03e5Sespie       && nregs_ymode > nregs_xmode
3193*c87b03e5Sespie       && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3194*c87b03e5Sespie 	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3195*c87b03e5Sespie     return true;
3196*c87b03e5Sespie 
3197*c87b03e5Sespie   /* Lowpart subregs are always valid.  */
3198*c87b03e5Sespie   if (offset == subreg_lowpart_offset (ymode, xmode))
3199*c87b03e5Sespie     return true;
3200*c87b03e5Sespie 
3201*c87b03e5Sespie #ifdef ENABLE_CHECKING
3202*c87b03e5Sespie   /* This should always pass, otherwise we don't know how to verify the
3203*c87b03e5Sespie      constraint.  These conditions may be relaxed but subreg_offset would
3204*c87b03e5Sespie      need to be redesigned.  */
3205*c87b03e5Sespie   if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3206*c87b03e5Sespie       || GET_MODE_SIZE (ymode) % nregs_ymode
3207*c87b03e5Sespie       || nregs_xmode % nregs_ymode)
3208*c87b03e5Sespie     abort ();
3209*c87b03e5Sespie #endif
3210*c87b03e5Sespie 
3211*c87b03e5Sespie   /* The XMODE value can be seen as a vector of NREGS_XMODE
3212*c87b03e5Sespie      values.  The subreg must represent a lowpart of a given field.
3213*c87b03e5Sespie      Compute which field it is.  */
3214*c87b03e5Sespie   offset -= subreg_lowpart_offset (ymode,
3215*c87b03e5Sespie 		  		   mode_for_size (GET_MODE_BITSIZE (xmode)
3216*c87b03e5Sespie 			  			  / nregs_xmode,
3217*c87b03e5Sespie 						  MODE_INT, 0));
3218*c87b03e5Sespie 
3219*c87b03e5Sespie   /* The size of YMODE must not be greater than the size of XMODE.  */
3220*c87b03e5Sespie   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3221*c87b03e5Sespie   if (mode_multiple == 0)
3222*c87b03e5Sespie     abort ();
3223*c87b03e5Sespie 
3224*c87b03e5Sespie   y_offset = offset / GET_MODE_SIZE (ymode);
3225*c87b03e5Sespie   nregs_multiple =  nregs_xmode / nregs_ymode;
3226*c87b03e5Sespie #ifdef ENABLE_CHECKING
3227*c87b03e5Sespie   if (offset % GET_MODE_SIZE (ymode)
3228*c87b03e5Sespie       || mode_multiple % nregs_multiple)
3229*c87b03e5Sespie     abort ();
3230*c87b03e5Sespie #endif
3231*c87b03e5Sespie   return (!(y_offset % (mode_multiple / nregs_multiple)));
3232*c87b03e5Sespie }
3233*c87b03e5Sespie 
3234*c87b03e5Sespie /* Return the final regno that a subreg expression refers to.  */
3235*c87b03e5Sespie unsigned int
3236*c87b03e5Sespie subreg_regno (x)
3237*c87b03e5Sespie      rtx x;
3238*c87b03e5Sespie {
3239*c87b03e5Sespie   unsigned int ret;
3240*c87b03e5Sespie   rtx subreg = SUBREG_REG (x);
3241*c87b03e5Sespie   int regno = REGNO (subreg);
3242*c87b03e5Sespie 
3243*c87b03e5Sespie   ret = regno + subreg_regno_offset (regno,
3244*c87b03e5Sespie 				     GET_MODE (subreg),
3245*c87b03e5Sespie 				     SUBREG_BYTE (x),
3246*c87b03e5Sespie 				     GET_MODE (x));
3247*c87b03e5Sespie   return ret;
3248*c87b03e5Sespie 
3249*c87b03e5Sespie }
3250*c87b03e5Sespie struct parms_set_data
3251*c87b03e5Sespie {
3252*c87b03e5Sespie   int nregs;
3253*c87b03e5Sespie   HARD_REG_SET regs;
3254*c87b03e5Sespie };
3255*c87b03e5Sespie 
3256*c87b03e5Sespie /* Helper function for noticing stores to parameter registers.  */
3257*c87b03e5Sespie static void
3258*c87b03e5Sespie parms_set (x, pat, data)
3259*c87b03e5Sespie 	rtx x, pat ATTRIBUTE_UNUSED;
3260*c87b03e5Sespie 	void *data;
3261*c87b03e5Sespie {
3262*c87b03e5Sespie   struct parms_set_data *d = data;
3263*c87b03e5Sespie   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3264*c87b03e5Sespie       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3265*c87b03e5Sespie     {
3266*c87b03e5Sespie       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3267*c87b03e5Sespie       d->nregs--;
3268*c87b03e5Sespie     }
3269*c87b03e5Sespie }
3270*c87b03e5Sespie 
3271*c87b03e5Sespie /* Look backward for first parameter to be loaded.
3272*c87b03e5Sespie    Do not skip BOUNDARY.  */
3273*c87b03e5Sespie rtx
3274*c87b03e5Sespie find_first_parameter_load (call_insn, boundary)
3275*c87b03e5Sespie      rtx call_insn, boundary;
3276*c87b03e5Sespie {
3277*c87b03e5Sespie   struct parms_set_data parm;
3278*c87b03e5Sespie   rtx p, before;
3279*c87b03e5Sespie 
3280*c87b03e5Sespie   /* Since different machines initialize their parameter registers
3281*c87b03e5Sespie      in different orders, assume nothing.  Collect the set of all
3282*c87b03e5Sespie      parameter registers.  */
3283*c87b03e5Sespie   CLEAR_HARD_REG_SET (parm.regs);
3284*c87b03e5Sespie   parm.nregs = 0;
3285*c87b03e5Sespie   for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3286*c87b03e5Sespie     if (GET_CODE (XEXP (p, 0)) == USE
3287*c87b03e5Sespie 	&& GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3288*c87b03e5Sespie       {
3289*c87b03e5Sespie 	if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3290*c87b03e5Sespie 	  abort ();
3291*c87b03e5Sespie 
3292*c87b03e5Sespie 	/* We only care about registers which can hold function
3293*c87b03e5Sespie 	   arguments.  */
3294*c87b03e5Sespie 	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3295*c87b03e5Sespie 	  continue;
3296*c87b03e5Sespie 
3297*c87b03e5Sespie 	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3298*c87b03e5Sespie 	parm.nregs++;
3299*c87b03e5Sespie       }
3300*c87b03e5Sespie   before = call_insn;
3301*c87b03e5Sespie 
3302*c87b03e5Sespie   /* Search backward for the first set of a register in this set.  */
3303*c87b03e5Sespie   while (parm.nregs && before != boundary)
3304*c87b03e5Sespie     {
3305*c87b03e5Sespie       before = PREV_INSN (before);
3306*c87b03e5Sespie 
3307*c87b03e5Sespie       /* It is possible that some loads got CSEed from one call to
3308*c87b03e5Sespie          another.  Stop in that case.  */
3309*c87b03e5Sespie       if (GET_CODE (before) == CALL_INSN)
3310*c87b03e5Sespie 	break;
3311*c87b03e5Sespie 
3312*c87b03e5Sespie       /* Our caller must either ensure that we will find all sets
3313*c87b03e5Sespie          (in case the code has not been optimized yet), or guard
3314*c87b03e5Sespie          against possible labels by setting BOUNDARY to the preceding
3315*c87b03e5Sespie          CODE_LABEL.  */
3316*c87b03e5Sespie       if (GET_CODE (before) == CODE_LABEL)
3317*c87b03e5Sespie 	{
3318*c87b03e5Sespie 	  if (before != boundary)
3319*c87b03e5Sespie 	    abort ();
3320*c87b03e5Sespie 	  break;
3321*c87b03e5Sespie 	}
3322*c87b03e5Sespie 
3323*c87b03e5Sespie       if (INSN_P (before))
3324*c87b03e5Sespie 	note_stores (PATTERN (before), parms_set, &parm);
3325*c87b03e5Sespie     }
3326*c87b03e5Sespie   return before;
3327*c87b03e5Sespie }
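
#if 0  /* Illustrative sketch, not part of the original file: a hypothetical
	  caller that uses the nearest preceding CODE_LABEL (or the start
	  of the insn chain) as the boundary, as required above.  */
static rtx
example_call_sequence_start (call_insn)
     rtx call_insn;
{
  rtx boundary;

  for (boundary = call_insn;
       PREV_INSN (boundary) != NULL_RTX && GET_CODE (boundary) != CODE_LABEL;
       boundary = PREV_INSN (boundary))
    ;

  return find_first_parameter_load (call_insn, boundary);
}
#endif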
3328*c87b03e5Sespie 
3329*c87b03e5Sespie /* Return true if we should avoid inserting code between INSN and the
3330*c87b03e5Sespie    preceding call instruction.  */
3331*c87b03e5Sespie 
3332*c87b03e5Sespie bool
3333*c87b03e5Sespie keep_with_call_p (insn)
3334*c87b03e5Sespie      rtx insn;
3335*c87b03e5Sespie {
3336*c87b03e5Sespie   rtx set;
3337*c87b03e5Sespie 
3338*c87b03e5Sespie   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3339*c87b03e5Sespie     {
3340*c87b03e5Sespie       if (GET_CODE (SET_DEST (set)) == REG
3341*c87b03e5Sespie 	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3342*c87b03e5Sespie 	  && fixed_regs[REGNO (SET_DEST (set))]
3343*c87b03e5Sespie 	  && general_operand (SET_SRC (set), VOIDmode))
3344*c87b03e5Sespie 	return true;
3345*c87b03e5Sespie       if (GET_CODE (SET_SRC (set)) == REG
3346*c87b03e5Sespie 	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3347*c87b03e5Sespie 	  && GET_CODE (SET_DEST (set)) == REG
3348*c87b03e5Sespie 	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3349*c87b03e5Sespie 	return true;
3350*c87b03e5Sespie       /* There may be a stack pop just after the call and before the store
3351*c87b03e5Sespie 	 of the return register.  Search for the actual store when deciding
3352*c87b03e5Sespie 	 if we can break or not.  */
3353*c87b03e5Sespie       if (SET_DEST (set) == stack_pointer_rtx)
3354*c87b03e5Sespie 	{
3355*c87b03e5Sespie 	  rtx i2 = next_nonnote_insn (insn);
3356*c87b03e5Sespie 	  if (i2 && keep_with_call_p (i2))
3357*c87b03e5Sespie 	    return true;
3358*c87b03e5Sespie 	}
3359*c87b03e5Sespie     }
3360*c87b03e5Sespie   return false;
3361*c87b03e5Sespie }
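
#if 0  /* Illustrative sketch, not part of the original file: a hypothetical
	  helper returning the last insn of the group that must stay glued
	  to CALL_INSN.  */
static rtx
example_end_of_call_group (call_insn)
     rtx call_insn;
{
  rtx insn = call_insn;
  rtx next;

  while ((next = next_nonnote_insn (insn)) != NULL_RTX
	 && keep_with_call_p (next))
    insn = next;
  return insn;
}
#endif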
3362*c87b03e5Sespie 
3363*c87b03e5Sespie /* Return true when a store to register X can be hoisted to a place
3364*c87b03e5Sespie    where the registers in LIVE are live (LIVE may be NULL when unknown).
3365*c87b03e5Sespie    VAL is the destination whose value will be used.  */
3366*c87b03e5Sespie 
3367*c87b03e5Sespie static bool
3368*c87b03e5Sespie hoist_test_store (x, val, live)
3369*c87b03e5Sespie      rtx x, val;
3370*c87b03e5Sespie      regset live;
3371*c87b03e5Sespie {
3372*c87b03e5Sespie   if (GET_CODE (x) == SCRATCH)
3373*c87b03e5Sespie     return true;
3374*c87b03e5Sespie 
3375*c87b03e5Sespie   if (rtx_equal_p (x, val))
3376*c87b03e5Sespie     return true;
3377*c87b03e5Sespie 
3378*c87b03e5Sespie   /* Allow a SUBREG of X unless it writes just part of a multi-register
3379*c87b03e5Sespie      pseudo; then all users would have to know about the hoisted store too.
3380*c87b03e5Sespie      The caller may represent that by specifying the whole SUBREG as VAL.  */
3381*c87b03e5Sespie 
3382*c87b03e5Sespie   if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3383*c87b03e5Sespie     {
3384*c87b03e5Sespie       if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3385*c87b03e5Sespie 	  && GET_MODE_BITSIZE (GET_MODE (x)) <
3386*c87b03e5Sespie 	  GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3387*c87b03e5Sespie 	return false;
3388*c87b03e5Sespie       return true;
3389*c87b03e5Sespie     }
3390*c87b03e5Sespie   if (GET_CODE (x) == SUBREG)
3391*c87b03e5Sespie     x = SUBREG_REG (x);
3392*c87b03e5Sespie 
3393*c87b03e5Sespie   /* Anything except a register store is not hoistable.  This includes
3394*c87b03e5Sespie      partial stores to registers.  */
3395*c87b03e5Sespie 
3396*c87b03e5Sespie   if (!REG_P (x))
3397*c87b03e5Sespie     return false;
3398*c87b03e5Sespie 
3399*c87b03e5Sespie   /* A pseudo register can always be replaced by another pseudo to avoid
3400*c87b03e5Sespie      the side effect; for a hard register we must ensure that it is dead.
3401*c87b03e5Sespie      Eventually we may want to add code that tries to turn pseudos into
3402*c87b03e5Sespie      hard registers, but it is unlikely to be useful.  */
3403*c87b03e5Sespie 
3404*c87b03e5Sespie   if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3405*c87b03e5Sespie     {
3406*c87b03e5Sespie       int regno = REGNO (x);
3407*c87b03e5Sespie       int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3408*c87b03e5Sespie 
3409*c87b03e5Sespie       if (!live)
3410*c87b03e5Sespie 	return false;
3411*c87b03e5Sespie       if (REGNO_REG_SET_P (live, regno))
3412*c87b03e5Sespie 	return false;
3413*c87b03e5Sespie       while (--n > 0)
3414*c87b03e5Sespie 	if (REGNO_REG_SET_P (live, regno + n))
3415*c87b03e5Sespie 	  return false;
3416*c87b03e5Sespie     }
3417*c87b03e5Sespie   return true;
3418*c87b03e5Sespie }
3419*c87b03e5Sespie 
3420*c87b03e5Sespie 
3421*c87b03e5Sespie /* Return true if INSN can be hoisted to place with LIVE hard registers
3422*c87b03e5Sespie    (LIVE can be NULL when unknown).  VAL is expected to be stored by the insn
3423*c87b03e5Sespie    and used by the hoisting pass.  */
3424*c87b03e5Sespie 
3425*c87b03e5Sespie bool
3426*c87b03e5Sespie can_hoist_insn_p (insn, val, live)
3427*c87b03e5Sespie      rtx insn, val;
3428*c87b03e5Sespie      regset live;
3429*c87b03e5Sespie {
3430*c87b03e5Sespie   rtx pat = PATTERN (insn);
3431*c87b03e5Sespie   int i;
3432*c87b03e5Sespie 
3433*c87b03e5Sespie   /* It is probably not worth the complexity to handle multiple
3434*c87b03e5Sespie      set stores.  */
3435*c87b03e5Sespie   if (!single_set (insn))
3436*c87b03e5Sespie     return false;
3437*c87b03e5Sespie   /* We could move a CALL_INSN, but we would need to check that all
3438*c87b03e5Sespie      call clobbered regs are dead, so give up for now.  */
3439*c87b03e5Sespie   if (GET_CODE (insn) == CALL_INSN)
3440*c87b03e5Sespie     return false;
3441*c87b03e5Sespie   /* In the future we will handle hoisting of libcall sequences, but
3442*c87b03e5Sespie      give up for now.  */
3443*c87b03e5Sespie   if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3444*c87b03e5Sespie     return false;
3445*c87b03e5Sespie   switch (GET_CODE (pat))
3446*c87b03e5Sespie     {
3447*c87b03e5Sespie     case SET:
3448*c87b03e5Sespie       if (!hoist_test_store (SET_DEST (pat), val, live))
3449*c87b03e5Sespie 	return false;
3450*c87b03e5Sespie       break;
3451*c87b03e5Sespie     case USE:
3452*c87b03e5Sespie       /* USEs have tricky semantics, so do not move them.  */
3453*c87b03e5Sespie       return false;
3454*c87b03e5Sespie       break;
3455*c87b03e5Sespie     case CLOBBER:
3456*c87b03e5Sespie       if (!hoist_test_store (XEXP (pat, 0), val, live))
3457*c87b03e5Sespie 	return false;
3458*c87b03e5Sespie       break;
3459*c87b03e5Sespie     case PARALLEL:
3460*c87b03e5Sespie       for (i = 0; i < XVECLEN (pat, 0); i++)
3461*c87b03e5Sespie 	{
3462*c87b03e5Sespie 	  rtx x = XVECEXP (pat, 0, i);
3463*c87b03e5Sespie 	  switch (GET_CODE (x))
3464*c87b03e5Sespie 	    {
3465*c87b03e5Sespie 	    case SET:
3466*c87b03e5Sespie 	      if (!hoist_test_store (SET_DEST (x), val, live))
3467*c87b03e5Sespie 		return false;
3468*c87b03e5Sespie 	      break;
3469*c87b03e5Sespie 	    case USE:
3470*c87b03e5Sespie 	      /* We would need to fix callers to really ensure availability
3471*c87b03e5Sespie 	         of all the values the insn uses, but for now it is safe to
3472*c87b03e5Sespie 		 prohibit hoisting of any insn having such hidden uses.  */
3473*c87b03e5Sespie 	      return false;
3474*c87b03e5Sespie 	      break;
3475*c87b03e5Sespie 	    case CLOBBER:
3476*c87b03e5Sespie 	      if (!hoist_test_store (SET_DEST (x), val, live))
3477*c87b03e5Sespie 		return false;
3478*c87b03e5Sespie 	      break;
3479*c87b03e5Sespie 	    default:
3480*c87b03e5Sespie 	      break;
3481*c87b03e5Sespie 	    }
3482*c87b03e5Sespie 	}
3483*c87b03e5Sespie       break;
3484*c87b03e5Sespie     default:
3485*c87b03e5Sespie       abort ();
3486*c87b03e5Sespie     }
3487*c87b03e5Sespie   return true;
3488*c87b03e5Sespie }
3489*c87b03e5Sespie 
3490*c87b03e5Sespie /* Update a store after hoisting - replace all stores to pseudo registers
3491*c87b03e5Sespie    by new pseudos to avoid clobbering their values, except for the store
3492*c87b03e5Sespie    to VAL, which is updated to store into NEW.  */
3493*c87b03e5Sespie 
3494*c87b03e5Sespie static void
3495*c87b03e5Sespie hoist_update_store (insn, xp, val, new)
3496*c87b03e5Sespie      rtx insn, *xp, val, new;
3497*c87b03e5Sespie {
3498*c87b03e5Sespie   rtx x = *xp;
3499*c87b03e5Sespie 
3500*c87b03e5Sespie   if (GET_CODE (x) == SCRATCH)
3501*c87b03e5Sespie     return;
3502*c87b03e5Sespie 
3503*c87b03e5Sespie   if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3504*c87b03e5Sespie     validate_change (insn, xp,
3505*c87b03e5Sespie 		     simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3506*c87b03e5Sespie 					  SUBREG_BYTE (x)), 1);
3507*c87b03e5Sespie   if (rtx_equal_p (x, val))
3508*c87b03e5Sespie     {
3509*c87b03e5Sespie       validate_change (insn, xp, new, 1);
3510*c87b03e5Sespie       return;
3511*c87b03e5Sespie     }
3512*c87b03e5Sespie   if (GET_CODE (x) == SUBREG)
3513*c87b03e5Sespie     {
3514*c87b03e5Sespie       xp = &SUBREG_REG (x);
3515*c87b03e5Sespie       x = *xp;
3516*c87b03e5Sespie     }
3517*c87b03e5Sespie 
3518*c87b03e5Sespie   if (!REG_P (x))
3519*c87b03e5Sespie     abort ();
3520*c87b03e5Sespie 
3521*c87b03e5Sespie   /* We've verified that hard registers are dead, so we may keep the side
3522*c87b03e5Sespie      effect.  Otherwise replace it with a new pseudo.  */
3523*c87b03e5Sespie   if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3524*c87b03e5Sespie     validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3525*c87b03e5Sespie   REG_NOTES (insn)
3526*c87b03e5Sespie     = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3527*c87b03e5Sespie }
3528*c87b03e5Sespie 
3529*c87b03e5Sespie /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3530*c87b03e5Sespie    and each other side effect to pseudo register by new pseudo register.  */
3531*c87b03e5Sespie 
3532*c87b03e5Sespie rtx
3533*c87b03e5Sespie hoist_insn_after (insn, after, val, new)
3534*c87b03e5Sespie      rtx insn, after, val, new;
3535*c87b03e5Sespie {
3536*c87b03e5Sespie   rtx pat;
3537*c87b03e5Sespie   int i;
3538*c87b03e5Sespie   rtx note;
3539*c87b03e5Sespie 
3540*c87b03e5Sespie   insn = emit_copy_of_insn_after (insn, after);
3541*c87b03e5Sespie   pat = PATTERN (insn);
3542*c87b03e5Sespie 
3543*c87b03e5Sespie   /* Remove REG_UNUSED notes as we will re-emit them.  */
3544*c87b03e5Sespie   while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3545*c87b03e5Sespie     remove_note (insn, note);
3546*c87b03e5Sespie 
3547*c87b03e5Sespie   /* To make this work, callers would have to move everything referenced
3548*c87b03e5Sespie      by REG_EQUAL/REG_EQUIV notes too.  Let's just remove them; it is
3549*c87b03e5Sespie      probably easier.  */
3550*c87b03e5Sespie   while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3551*c87b03e5Sespie     remove_note (insn, note);
3552*c87b03e5Sespie   while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3553*c87b03e5Sespie     remove_note (insn, note);
3554*c87b03e5Sespie 
3555*c87b03e5Sespie   /* Remove REG_DEAD notes as they might not be valid anymore in case
3556*c87b03e5Sespie      we create redundancy.  */
3557*c87b03e5Sespie   while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3558*c87b03e5Sespie     remove_note (insn, note);
3559*c87b03e5Sespie   switch (GET_CODE (pat))
3560*c87b03e5Sespie     {
3561*c87b03e5Sespie     case SET:
3562*c87b03e5Sespie       hoist_update_store (insn, &SET_DEST (pat), val, new);
3563*c87b03e5Sespie       break;
3564*c87b03e5Sespie     case USE:
3565*c87b03e5Sespie       break;
3566*c87b03e5Sespie     case CLOBBER:
3567*c87b03e5Sespie       hoist_update_store (insn, &XEXP (pat, 0), val, new);
3568*c87b03e5Sespie       break;
3569*c87b03e5Sespie     case PARALLEL:
3570*c87b03e5Sespie       for (i = 0; i < XVECLEN (pat, 0); i++)
3571*c87b03e5Sespie 	{
3572*c87b03e5Sespie 	  rtx x = XVECEXP (pat, 0, i);
3573*c87b03e5Sespie 	  switch (GET_CODE (x))
3574*c87b03e5Sespie 	    {
3575*c87b03e5Sespie 	    case SET:
3576*c87b03e5Sespie 	      hoist_update_store (insn, &SET_DEST (x), val, new);
3577*c87b03e5Sespie 	      break;
3578*c87b03e5Sespie 	    case USE:
3579*c87b03e5Sespie 	      break;
3580*c87b03e5Sespie 	    case CLOBBER:
3581*c87b03e5Sespie 	      hoist_update_store (insn, &SET_DEST (x), val, new);
3582*c87b03e5Sespie 	      break;
3583*c87b03e5Sespie 	    default:
3584*c87b03e5Sespie 	      break;
3585*c87b03e5Sespie 	    }
3586*c87b03e5Sespie 	}
3587*c87b03e5Sespie       break;
3588*c87b03e5Sespie     default:
3589*c87b03e5Sespie       abort ();
3590*c87b03e5Sespie     }
3591*c87b03e5Sespie   if (!apply_change_group ())
3592*c87b03e5Sespie     abort ();
3593*c87b03e5Sespie 
3594*c87b03e5Sespie   return insn;
3595*c87b03e5Sespie }
3596*c87b03e5Sespie 
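/* Hoist INSN to edge E: emit a copy whose store of VAL is redirected to
   NEW, queue it on the edge as emit_insn_on_edge would, and return the
   copy; the caller commits the queued edge insertions later.  */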
3597*c87b03e5Sespie rtx
3598*c87b03e5Sespie hoist_insn_to_edge (insn, e, val, new)
3599*c87b03e5Sespie      rtx insn, val, new;
3600*c87b03e5Sespie      edge e;
3601*c87b03e5Sespie {
3602*c87b03e5Sespie   rtx new_insn;
3603*c87b03e5Sespie 
3604*c87b03e5Sespie   /* We cannot insert instructions on an abnormal critical edge.
3605*c87b03e5Sespie      It will be easier to find the culprit if we die now.  */
3606*c87b03e5Sespie   if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3607*c87b03e5Sespie     abort ();
3608*c87b03e5Sespie 
3609*c87b03e5Sespie   /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3610*c87b03e5Sespie      stuff.  We also emit CALL_INSNs and friends.  */
3611*c87b03e5Sespie   if (e->insns == NULL_RTX)
3612*c87b03e5Sespie     {
3613*c87b03e5Sespie       start_sequence ();
3614*c87b03e5Sespie       emit_note (NULL, NOTE_INSN_DELETED);
3615*c87b03e5Sespie     }
3616*c87b03e5Sespie   else
3617*c87b03e5Sespie     push_to_sequence (e->insns);
3618*c87b03e5Sespie 
3619*c87b03e5Sespie   new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3620*c87b03e5Sespie 
3621*c87b03e5Sespie   e->insns = get_insns ();
3622*c87b03e5Sespie   end_sequence ();
3623*c87b03e5Sespie   return new_insn;
3624*c87b03e5Sespie }
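
#if 0  /* Illustrative sketch, not part of the original file: a hypothetical
	  wrapper showing how the hoisting primitives fit together.  LIVE
	  describes the hard registers live on edge E; the caller is
	  expected to commit pending edge insertions afterwards.  */
static rtx
example_try_hoist (insn, e, val, new, live)
     rtx insn, val, new;
     edge e;
     regset live;
{
  if (!can_hoist_insn_p (insn, val, live))
    return NULL_RTX;
  return hoist_insn_to_edge (insn, e, val, new);
}
#endif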
3625