/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "df.h"
#include "tree.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
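
/* Illustrative example (editorial note, not part of the original source):
   on a target where TARGET_MODE_REP_EXTENDED (SImode, DImode) is
   SIGN_EXTEND, SImode values are kept in registers as sign-extended
   DImode values.  Truncating a DImode value to SImode is then a pure
   mode switch only if the high-order bits of the DImode value are
   already copies of the SImode sign bit;
   num_sign_bit_copies_in_rep[DImode][SImode] records how many such
   sign-bit copies are required.  */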

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
        return 0;
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  if (STRICT_ALIGNMENT
      && unaligned_mems
      && GET_MODE_SIZE (mode) != 0)
    {
      HOST_WIDE_INT actual_offset = offset;
#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (actual_offset % GET_MODE_SIZE (mode) != 0)
        return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x))
        {
          tree decl;
          HOST_WIDE_INT decl_size;

          if (offset < 0)
            return 1;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (size == 0)
            return offset != 0;

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
                         ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
                         : -1);
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      /* - or it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (CONST_INT_P (XEXP (x, 1))
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
}
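
/* Worked example (editorial note, not part of the original source):
   for an address such as (plus (reg sp) (const_int 8)) the REG case
   accepts the stack pointer and the PLUS case accepts the constant
   offset, so rtx_addr_can_trap_p returns 0; for a SYMBOL_REF marked
   weak the SYMBOL_REF case returns 1, since a weak symbol may resolve
   to a null address.  */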

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (CONST_INT_P (XEXP (x, 1)))
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}
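
/* For example, get_integer_term on (const (plus (symbol_ref "x")
   (const_int 4))) returns 4, and on (const (minus (symbol_ref "x")
   (const_int 4))) returns -4; anything without an obvious integer
   term yields 0.  (Editorial illustration, not part of the original
   source; "x" is a made-up symbol name.)  */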

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}

/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
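
/* Usage sketch (editorial illustration, not part of the original source):

     rtx base, offset;
     split_const (x, &base, &offset);

   For x == (const (plus (symbol_ref "y") (const_int 12))) this sets
   base to (symbol_ref "y") and offset to (const_int 12); for any other
   x it sets base to x itself and offset to const0_rtx.  ("y" is a
   made-up symbol name.)  */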

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}


/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
    case CONST_FIXED:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (const_rtx beg, const_rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
           || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
{
  const_rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
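
/* Note (editorial illustration, not part of the original source):
   reg_set_p answers for a single insn, so for a call_insn it also
   returns 1 when REG is a hard register in regs_invalidated_by_call,
   or when REG is a MEM the call might clobber, even if the pattern
   itself contains no explicit SET or CLOBBER of REG.  */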

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const_rtx start, const_rtx end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* This function, called through note_stores, collects sets and
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
   by DATA.  */
void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
  if (REG_P (x) && HARD_REGISTER_P (x))
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}

/* Examine INSN, and compute the set of hard registers written by it.
   Store it in *PSET.  Should only be called after reload.  */
void
find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset)
{
  rtx link;

  CLEAR_HARD_REG_SET (*pset);
  note_stores (PATTERN (insn), record_hard_reg_sets, pset);
  if (CALL_P (insn))
    IOR_HARD_REG_SET (*pset, call_used_reg_set);
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
}

/* A for_each_rtx subroutine of record_hard_reg_uses.  */
static int
record_hard_reg_uses_1 (rtx *px, void *data)
{
  rtx x = *px;
  HARD_REG_SET *pused = (HARD_REG_SET *)data;

  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
      while (nregs-- > 0)
        SET_HARD_REG_BIT (*pused, REGNO (x) + nregs);
    }
  return 0;
}

/* Like record_hard_reg_sets, but called through note_uses.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  for_each_rtx (px, record_hard_reg_uses_1, data);
}

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose outputs
   will not be used, which we ignore.  */

rtx
single_set_2 (const_rtx insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set for the first time, we just expect it
                 to be the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
1106*e4b17023SJohn Marino 
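/* Usage sketch (illustrative only): most callers go through single_set,
   which falls back on single_set_2 for PARALLEL patterns.  A typical
   pattern-matching use is

       rtx set = single_set (insn);
       if (set && REG_P (SET_DEST (set)) && CONSTANT_P (SET_SRC (set)))
         ...  treat INSN as a simple "load constant into register" ...

   so an insn whose extra SETs are all REG_UNUSED still qualifies.  */
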
1107*e4b17023SJohn Marino /* Given an INSN, return nonzero if it has more than one SET, else return
1108*e4b17023SJohn Marino    zero.  */
1109*e4b17023SJohn Marino 
1110*e4b17023SJohn Marino int
1111*e4b17023SJohn Marino multiple_sets (const_rtx insn)
1112*e4b17023SJohn Marino {
1113*e4b17023SJohn Marino   int found;
1114*e4b17023SJohn Marino   int i;
1115*e4b17023SJohn Marino 
1116*e4b17023SJohn Marino   /* INSN must be an insn.  */
1117*e4b17023SJohn Marino   if (! INSN_P (insn))
1118*e4b17023SJohn Marino     return 0;
1119*e4b17023SJohn Marino 
1120*e4b17023SJohn Marino   /* Only a PARALLEL can have multiple SETs.  */
1121*e4b17023SJohn Marino   if (GET_CODE (PATTERN (insn)) == PARALLEL)
1122*e4b17023SJohn Marino     {
1123*e4b17023SJohn Marino       for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1124*e4b17023SJohn Marino 	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1125*e4b17023SJohn Marino 	  {
1126*e4b17023SJohn Marino 	    /* If we have already found a SET, then return now.  */
1127*e4b17023SJohn Marino 	    if (found)
1128*e4b17023SJohn Marino 	      return 1;
1129*e4b17023SJohn Marino 	    else
1130*e4b17023SJohn Marino 	      found = 1;
1131*e4b17023SJohn Marino 	  }
1132*e4b17023SJohn Marino     }
1133*e4b17023SJohn Marino 
1134*e4b17023SJohn Marino   /* Either zero or one SET.  */
1135*e4b17023SJohn Marino   return 0;
1136*e4b17023SJohn Marino }
1137*e4b17023SJohn Marino 
1138*e4b17023SJohn Marino /* Return nonzero if the destination of SET equals the source
1139*e4b17023SJohn Marino    and there are no side effects.  */
1140*e4b17023SJohn Marino 
1141*e4b17023SJohn Marino int
1142*e4b17023SJohn Marino set_noop_p (const_rtx set)
1143*e4b17023SJohn Marino {
1144*e4b17023SJohn Marino   rtx src = SET_SRC (set);
1145*e4b17023SJohn Marino   rtx dst = SET_DEST (set);
1146*e4b17023SJohn Marino 
1147*e4b17023SJohn Marino   if (dst == pc_rtx && src == pc_rtx)
1148*e4b17023SJohn Marino     return 1;
1149*e4b17023SJohn Marino 
1150*e4b17023SJohn Marino   if (MEM_P (dst) && MEM_P (src))
1151*e4b17023SJohn Marino     return rtx_equal_p (dst, src) && !side_effects_p (dst);
1152*e4b17023SJohn Marino 
1153*e4b17023SJohn Marino   if (GET_CODE (dst) == ZERO_EXTRACT)
1154*e4b17023SJohn Marino     return rtx_equal_p (XEXP (dst, 0), src)
1155*e4b17023SJohn Marino 	   && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1156*e4b17023SJohn Marino 	   && !side_effects_p (src);
1157*e4b17023SJohn Marino 
1158*e4b17023SJohn Marino   if (GET_CODE (dst) == STRICT_LOW_PART)
1159*e4b17023SJohn Marino     dst = XEXP (dst, 0);
1160*e4b17023SJohn Marino 
1161*e4b17023SJohn Marino   if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1162*e4b17023SJohn Marino     {
1163*e4b17023SJohn Marino       if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1164*e4b17023SJohn Marino 	return 0;
1165*e4b17023SJohn Marino       src = SUBREG_REG (src);
1166*e4b17023SJohn Marino       dst = SUBREG_REG (dst);
1167*e4b17023SJohn Marino     }
1168*e4b17023SJohn Marino 
1169*e4b17023SJohn Marino   return (REG_P (src) && REG_P (dst)
1170*e4b17023SJohn Marino 	  && REGNO (src) == REGNO (dst));
1171*e4b17023SJohn Marino }
1172*e4b17023SJohn Marino 
1173*e4b17023SJohn Marino /* Return nonzero if an insn consists only of SETs, each of which only sets a
1174*e4b17023SJohn Marino    value to itself.  */
1175*e4b17023SJohn Marino 
1176*e4b17023SJohn Marino int
1177*e4b17023SJohn Marino noop_move_p (const_rtx insn)
1178*e4b17023SJohn Marino {
1179*e4b17023SJohn Marino   rtx pat = PATTERN (insn);
1180*e4b17023SJohn Marino 
1181*e4b17023SJohn Marino   if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1182*e4b17023SJohn Marino     return 1;
1183*e4b17023SJohn Marino 
1184*e4b17023SJohn Marino   /* Insns carrying these notes are useful later on.  */
1185*e4b17023SJohn Marino   if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1186*e4b17023SJohn Marino     return 0;
1187*e4b17023SJohn Marino 
1188*e4b17023SJohn Marino   if (GET_CODE (pat) == SET && set_noop_p (pat))
1189*e4b17023SJohn Marino     return 1;
1190*e4b17023SJohn Marino 
1191*e4b17023SJohn Marino   if (GET_CODE (pat) == PARALLEL)
1192*e4b17023SJohn Marino     {
1193*e4b17023SJohn Marino       int i;
1194*e4b17023SJohn Marino       /* If nothing but SETs of registers to themselves,
1195*e4b17023SJohn Marino 	 this insn can also be deleted.  */
1196*e4b17023SJohn Marino       for (i = 0; i < XVECLEN (pat, 0); i++)
1197*e4b17023SJohn Marino 	{
1198*e4b17023SJohn Marino 	  rtx tem = XVECEXP (pat, 0, i);
1199*e4b17023SJohn Marino 
1200*e4b17023SJohn Marino 	  if (GET_CODE (tem) == USE
1201*e4b17023SJohn Marino 	      || GET_CODE (tem) == CLOBBER)
1202*e4b17023SJohn Marino 	    continue;
1203*e4b17023SJohn Marino 
1204*e4b17023SJohn Marino 	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1205*e4b17023SJohn Marino 	    return 0;
1206*e4b17023SJohn Marino 	}
1207*e4b17023SJohn Marino 
1208*e4b17023SJohn Marino       return 1;
1209*e4b17023SJohn Marino     }
1210*e4b17023SJohn Marino   return 0;
1211*e4b17023SJohn Marino }
1212*e4b17023SJohn Marino 
1213*e4b17023SJohn Marino 
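/* Usage sketch (illustrative only): a cleanup pass can use noop_move_p
   to drop moves that only set registers to themselves, e.g.

       if (INSN_P (insn) && noop_move_p (insn))
         delete_insn (insn);

   delete_insn is the usual deletion entry point elsewhere in GCC and is
   named here only for illustration.  */
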
1214*e4b17023SJohn Marino /* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
1215*e4b17023SJohn Marino    is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1216*e4b17023SJohn Marino    If the object was modified, if we hit a partial assignment to X, or hit a
1217*e4b17023SJohn Marino    CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
1218*e4b17023SJohn Marino    point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
1219*e4b17023SJohn Marino    be the src.  */
1220*e4b17023SJohn Marino 
1221*e4b17023SJohn Marino rtx
1222*e4b17023SJohn Marino find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1223*e4b17023SJohn Marino {
1224*e4b17023SJohn Marino   rtx p;
1225*e4b17023SJohn Marino 
1226*e4b17023SJohn Marino   for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1227*e4b17023SJohn Marino        p = PREV_INSN (p))
1228*e4b17023SJohn Marino     if (INSN_P (p))
1229*e4b17023SJohn Marino       {
1230*e4b17023SJohn Marino 	rtx set = single_set (p);
1231*e4b17023SJohn Marino 	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1232*e4b17023SJohn Marino 
1233*e4b17023SJohn Marino 	if (set && rtx_equal_p (x, SET_DEST (set)))
1234*e4b17023SJohn Marino 	  {
1235*e4b17023SJohn Marino 	    rtx src = SET_SRC (set);
1236*e4b17023SJohn Marino 
1237*e4b17023SJohn Marino 	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1238*e4b17023SJohn Marino 	      src = XEXP (note, 0);
1239*e4b17023SJohn Marino 
1240*e4b17023SJohn Marino 	    if ((valid_to == NULL_RTX
1241*e4b17023SJohn Marino 		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1242*e4b17023SJohn Marino 		/* Reject hard registers because we don't usually want
1243*e4b17023SJohn Marino 		   to use them; we'd rather use a pseudo.  */
1244*e4b17023SJohn Marino 		&& (! (REG_P (src)
1245*e4b17023SJohn Marino 		      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1246*e4b17023SJohn Marino 	      {
1247*e4b17023SJohn Marino 		*pinsn = p;
1248*e4b17023SJohn Marino 		return src;
1249*e4b17023SJohn Marino 	      }
1250*e4b17023SJohn Marino 	  }
1251*e4b17023SJohn Marino 
1252*e4b17023SJohn Marino 	/* If X is set in a non-simple way, we don't have a value.  */
1253*e4b17023SJohn Marino 	if (reg_set_p (x, p))
1254*e4b17023SJohn Marino 	  break;
1255*e4b17023SJohn Marino       }
1256*e4b17023SJohn Marino 
1257*e4b17023SJohn Marino   return x;
1258*e4b17023SJohn Marino }
1259*e4b17023SJohn Marino 
1260*e4b17023SJohn Marino /* Return nonzero if register in range [REGNO, ENDREGNO)
1261*e4b17023SJohn Marino    appears either explicitly or implicitly in X
1262*e4b17023SJohn Marino    other than being stored into.
1263*e4b17023SJohn Marino 
1264*e4b17023SJohn Marino    References contained within the substructure at LOC do not count.
1265*e4b17023SJohn Marino    LOC may be zero, meaning don't ignore anything.  */
1266*e4b17023SJohn Marino 
1267*e4b17023SJohn Marino int
1268*e4b17023SJohn Marino refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1269*e4b17023SJohn Marino 		   rtx *loc)
1270*e4b17023SJohn Marino {
1271*e4b17023SJohn Marino   int i;
1272*e4b17023SJohn Marino   unsigned int x_regno;
1273*e4b17023SJohn Marino   RTX_CODE code;
1274*e4b17023SJohn Marino   const char *fmt;
1275*e4b17023SJohn Marino 
1276*e4b17023SJohn Marino  repeat:
1277*e4b17023SJohn Marino   /* The contents of a REG_NONNEG note are always zero, so we must come here
1278*e4b17023SJohn Marino      upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
1279*e4b17023SJohn Marino   if (x == 0)
1280*e4b17023SJohn Marino     return 0;
1281*e4b17023SJohn Marino 
1282*e4b17023SJohn Marino   code = GET_CODE (x);
1283*e4b17023SJohn Marino 
1284*e4b17023SJohn Marino   switch (code)
1285*e4b17023SJohn Marino     {
1286*e4b17023SJohn Marino     case REG:
1287*e4b17023SJohn Marino       x_regno = REGNO (x);
1288*e4b17023SJohn Marino 
1289*e4b17023SJohn Marino       /* If we are modifying the stack, frame, or argument pointer, it will
1290*e4b17023SJohn Marino 	 clobber a virtual register.  In fact, we could be more precise,
1291*e4b17023SJohn Marino 	 but it isn't worth it.  */
1292*e4b17023SJohn Marino       if ((x_regno == STACK_POINTER_REGNUM
1293*e4b17023SJohn Marino #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1294*e4b17023SJohn Marino 	   || x_regno == ARG_POINTER_REGNUM
1295*e4b17023SJohn Marino #endif
1296*e4b17023SJohn Marino 	   || x_regno == FRAME_POINTER_REGNUM)
1297*e4b17023SJohn Marino 	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1298*e4b17023SJohn Marino 	return 1;
1299*e4b17023SJohn Marino 
1300*e4b17023SJohn Marino       return endregno > x_regno && regno < END_REGNO (x);
1301*e4b17023SJohn Marino 
1302*e4b17023SJohn Marino     case SUBREG:
1303*e4b17023SJohn Marino       /* If this is a SUBREG of a hard reg, we can see exactly which
1304*e4b17023SJohn Marino 	 registers are being modified.  Otherwise, handle normally.  */
1305*e4b17023SJohn Marino       if (REG_P (SUBREG_REG (x))
1306*e4b17023SJohn Marino 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1307*e4b17023SJohn Marino 	{
1308*e4b17023SJohn Marino 	  unsigned int inner_regno = subreg_regno (x);
1309*e4b17023SJohn Marino 	  unsigned int inner_endregno
1310*e4b17023SJohn Marino 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1311*e4b17023SJohn Marino 			     ? subreg_nregs (x) : 1);
1312*e4b17023SJohn Marino 
1313*e4b17023SJohn Marino 	  return endregno > inner_regno && regno < inner_endregno;
1314*e4b17023SJohn Marino 	}
1315*e4b17023SJohn Marino       break;
1316*e4b17023SJohn Marino 
1317*e4b17023SJohn Marino     case CLOBBER:
1318*e4b17023SJohn Marino     case SET:
1319*e4b17023SJohn Marino       if (&SET_DEST (x) != loc
1320*e4b17023SJohn Marino 	  /* Note setting a SUBREG counts as referring to the REG it is in for
1321*e4b17023SJohn Marino 	     a pseudo but not for hard registers since we can
1322*e4b17023SJohn Marino 	     treat each word individually.  */
1323*e4b17023SJohn Marino 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
1324*e4b17023SJohn Marino 	       && loc != &SUBREG_REG (SET_DEST (x))
1325*e4b17023SJohn Marino 	       && REG_P (SUBREG_REG (SET_DEST (x)))
1326*e4b17023SJohn Marino 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1327*e4b17023SJohn Marino 	       && refers_to_regno_p (regno, endregno,
1328*e4b17023SJohn Marino 				     SUBREG_REG (SET_DEST (x)), loc))
1329*e4b17023SJohn Marino 	      || (!REG_P (SET_DEST (x))
1330*e4b17023SJohn Marino 		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1331*e4b17023SJohn Marino 	return 1;
1332*e4b17023SJohn Marino 
1333*e4b17023SJohn Marino       if (code == CLOBBER || loc == &SET_SRC (x))
1334*e4b17023SJohn Marino 	return 0;
1335*e4b17023SJohn Marino       x = SET_SRC (x);
1336*e4b17023SJohn Marino       goto repeat;
1337*e4b17023SJohn Marino 
1338*e4b17023SJohn Marino     default:
1339*e4b17023SJohn Marino       break;
1340*e4b17023SJohn Marino     }
1341*e4b17023SJohn Marino 
1342*e4b17023SJohn Marino   /* X does not match, so try its subexpressions.  */
1343*e4b17023SJohn Marino 
1344*e4b17023SJohn Marino   fmt = GET_RTX_FORMAT (code);
1345*e4b17023SJohn Marino   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1346*e4b17023SJohn Marino     {
1347*e4b17023SJohn Marino       if (fmt[i] == 'e' && loc != &XEXP (x, i))
1348*e4b17023SJohn Marino 	{
1349*e4b17023SJohn Marino 	  if (i == 0)
1350*e4b17023SJohn Marino 	    {
1351*e4b17023SJohn Marino 	      x = XEXP (x, 0);
1352*e4b17023SJohn Marino 	      goto repeat;
1353*e4b17023SJohn Marino 	    }
1354*e4b17023SJohn Marino 	  else
1355*e4b17023SJohn Marino 	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1356*e4b17023SJohn Marino 	      return 1;
1357*e4b17023SJohn Marino 	}
1358*e4b17023SJohn Marino       else if (fmt[i] == 'E')
1359*e4b17023SJohn Marino 	{
1360*e4b17023SJohn Marino 	  int j;
1361*e4b17023SJohn Marino 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1362*e4b17023SJohn Marino 	    if (loc != &XVECEXP (x, i, j)
1363*e4b17023SJohn Marino 		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1364*e4b17023SJohn Marino 	      return 1;
1365*e4b17023SJohn Marino 	}
1366*e4b17023SJohn Marino     }
1367*e4b17023SJohn Marino   return 0;
1368*e4b17023SJohn Marino }
1369*e4b17023SJohn Marino 
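/* Usage sketch (illustrative only): to ask whether any of hard registers
   2..4 is read by the body of INSN, ignoring stores into them, one can
   write

       if (refers_to_regno_p (2, 5, PATTERN (insn), NULL))
         ...  the range [2, 5) is referenced somewhere in the pattern ...

   The LOC argument lets a caller exclude one subexpression, typically
   the SET_DEST it is about to rewrite.  */
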
1370*e4b17023SJohn Marino /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1371*e4b17023SJohn Marino    we check if any register number in X conflicts with the relevant register
1372*e4b17023SJohn Marino    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1373*e4b17023SJohn Marino    contains a MEM (we don't bother checking for memory addresses that can't
1374*e4b17023SJohn Marino    conflict because we expect this to be a rare case).  */
1375*e4b17023SJohn Marino 
1376*e4b17023SJohn Marino int
1377*e4b17023SJohn Marino reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1378*e4b17023SJohn Marino {
1379*e4b17023SJohn Marino   unsigned int regno, endregno;
1380*e4b17023SJohn Marino 
1381*e4b17023SJohn Marino   /* If either argument is a constant, then modifying X cannot
1382*e4b17023SJohn Marino      affect IN.  Here we look only at IN; the CONSTANT_P (x) check is
1383*e4b17023SJohn Marino      folded into the switch statement below.  */
1384*e4b17023SJohn Marino   if (CONSTANT_P (in))
1385*e4b17023SJohn Marino     return 0;
1386*e4b17023SJohn Marino 
1387*e4b17023SJohn Marino  recurse:
1388*e4b17023SJohn Marino   switch (GET_CODE (x))
1389*e4b17023SJohn Marino     {
1390*e4b17023SJohn Marino     case STRICT_LOW_PART:
1391*e4b17023SJohn Marino     case ZERO_EXTRACT:
1392*e4b17023SJohn Marino     case SIGN_EXTRACT:
1393*e4b17023SJohn Marino       /* Overly conservative.  */
1394*e4b17023SJohn Marino       x = XEXP (x, 0);
1395*e4b17023SJohn Marino       goto recurse;
1396*e4b17023SJohn Marino 
1397*e4b17023SJohn Marino     case SUBREG:
1398*e4b17023SJohn Marino       regno = REGNO (SUBREG_REG (x));
1399*e4b17023SJohn Marino       if (regno < FIRST_PSEUDO_REGISTER)
1400*e4b17023SJohn Marino 	regno = subreg_regno (x);
1401*e4b17023SJohn Marino       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1402*e4b17023SJohn Marino 			  ? subreg_nregs (x) : 1);
1403*e4b17023SJohn Marino       goto do_reg;
1404*e4b17023SJohn Marino 
1405*e4b17023SJohn Marino     case REG:
1406*e4b17023SJohn Marino       regno = REGNO (x);
1407*e4b17023SJohn Marino       endregno = END_REGNO (x);
1408*e4b17023SJohn Marino     do_reg:
1409*e4b17023SJohn Marino       return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1410*e4b17023SJohn Marino 
1411*e4b17023SJohn Marino     case MEM:
1412*e4b17023SJohn Marino       {
1413*e4b17023SJohn Marino 	const char *fmt;
1414*e4b17023SJohn Marino 	int i;
1415*e4b17023SJohn Marino 
1416*e4b17023SJohn Marino 	if (MEM_P (in))
1417*e4b17023SJohn Marino 	  return 1;
1418*e4b17023SJohn Marino 
1419*e4b17023SJohn Marino 	fmt = GET_RTX_FORMAT (GET_CODE (in));
1420*e4b17023SJohn Marino 	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1421*e4b17023SJohn Marino 	  if (fmt[i] == 'e')
1422*e4b17023SJohn Marino 	    {
1423*e4b17023SJohn Marino 	      if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1424*e4b17023SJohn Marino 		return 1;
1425*e4b17023SJohn Marino 	    }
1426*e4b17023SJohn Marino 	  else if (fmt[i] == 'E')
1427*e4b17023SJohn Marino 	    {
1428*e4b17023SJohn Marino 	      int j;
1429*e4b17023SJohn Marino 	      for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1430*e4b17023SJohn Marino 		if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1431*e4b17023SJohn Marino 		  return 1;
1432*e4b17023SJohn Marino 	    }
1433*e4b17023SJohn Marino 
1434*e4b17023SJohn Marino 	return 0;
1435*e4b17023SJohn Marino       }
1436*e4b17023SJohn Marino 
1437*e4b17023SJohn Marino     case SCRATCH:
1438*e4b17023SJohn Marino     case PC:
1439*e4b17023SJohn Marino     case CC0:
1440*e4b17023SJohn Marino       return reg_mentioned_p (x, in);
1441*e4b17023SJohn Marino 
1442*e4b17023SJohn Marino     case PARALLEL:
1443*e4b17023SJohn Marino       {
1444*e4b17023SJohn Marino 	int i;
1445*e4b17023SJohn Marino 
1446*e4b17023SJohn Marino 	/* If any register in here refers to it we return true.  */
1447*e4b17023SJohn Marino 	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1448*e4b17023SJohn Marino 	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
1449*e4b17023SJohn Marino 	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1450*e4b17023SJohn Marino 	    return 1;
1451*e4b17023SJohn Marino 	return 0;
1452*e4b17023SJohn Marino       }
1453*e4b17023SJohn Marino 
1454*e4b17023SJohn Marino     default:
1455*e4b17023SJohn Marino       gcc_assert (CONSTANT_P (x));
1456*e4b17023SJohn Marino       return 0;
1457*e4b17023SJohn Marino     }
1458*e4b17023SJohn Marino }
1459*e4b17023SJohn Marino 
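/* Usage sketch (illustrative only): reg_overlap_mentioned_p answers the
   asymmetric question "could storing into X change the value of IN?", e.g.

       if (reg_overlap_mentioned_p (SET_DEST (set), SET_SRC (set)))
         ...  the destination feeds back into the source; do not reorder ...

   Since any MEM in IN is assumed to conflict with a MEM destination, the
   test is conservative but cheap.  */
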
1460*e4b17023SJohn Marino /* Call FUN on each register or MEM that is stored into or clobbered by X.
1461*e4b17023SJohn Marino    (X would be the pattern of an insn).  DATA is an arbitrary pointer,
1462*e4b17023SJohn Marino    ignored by note_stores, but passed to FUN.
1463*e4b17023SJohn Marino 
1464*e4b17023SJohn Marino    FUN receives three arguments:
1465*e4b17023SJohn Marino    1. the REG, MEM, CC0 or PC being stored in or clobbered,
1466*e4b17023SJohn Marino    2. the SET or CLOBBER rtx that does the store,
1467*e4b17023SJohn Marino    3. the pointer DATA provided to note_stores.
1468*e4b17023SJohn Marino 
1469*e4b17023SJohn Marino   If the item being stored in or clobbered is a SUBREG of a hard register,
1470*e4b17023SJohn Marino   the SUBREG will be passed.  */
1471*e4b17023SJohn Marino 
1472*e4b17023SJohn Marino void
1473*e4b17023SJohn Marino note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1474*e4b17023SJohn Marino {
1475*e4b17023SJohn Marino   int i;
1476*e4b17023SJohn Marino 
1477*e4b17023SJohn Marino   if (GET_CODE (x) == COND_EXEC)
1478*e4b17023SJohn Marino     x = COND_EXEC_CODE (x);
1479*e4b17023SJohn Marino 
1480*e4b17023SJohn Marino   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1481*e4b17023SJohn Marino     {
1482*e4b17023SJohn Marino       rtx dest = SET_DEST (x);
1483*e4b17023SJohn Marino 
1484*e4b17023SJohn Marino       while ((GET_CODE (dest) == SUBREG
1485*e4b17023SJohn Marino 	      && (!REG_P (SUBREG_REG (dest))
1486*e4b17023SJohn Marino 		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1487*e4b17023SJohn Marino 	     || GET_CODE (dest) == ZERO_EXTRACT
1488*e4b17023SJohn Marino 	     || GET_CODE (dest) == STRICT_LOW_PART)
1489*e4b17023SJohn Marino 	dest = XEXP (dest, 0);
1490*e4b17023SJohn Marino 
1491*e4b17023SJohn Marino       /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1492*e4b17023SJohn Marino 	 each of whose first operand is a register.  */
1493*e4b17023SJohn Marino       if (GET_CODE (dest) == PARALLEL)
1494*e4b17023SJohn Marino 	{
1495*e4b17023SJohn Marino 	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1496*e4b17023SJohn Marino 	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1497*e4b17023SJohn Marino 	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1498*e4b17023SJohn Marino 	}
1499*e4b17023SJohn Marino       else
1500*e4b17023SJohn Marino 	(*fun) (dest, x, data);
1501*e4b17023SJohn Marino     }
1502*e4b17023SJohn Marino 
1503*e4b17023SJohn Marino   else if (GET_CODE (x) == PARALLEL)
1504*e4b17023SJohn Marino     for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1505*e4b17023SJohn Marino       note_stores (XVECEXP (x, 0, i), fun, data);
1506*e4b17023SJohn Marino }
1507*e4b17023SJohn Marino 
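/* Usage sketch (illustrative only; the callback name is hypothetical):
   a typical note_stores client supplies a small callback, e.g.

       static void
       count_mem_stores (rtx dest, const_rtx set ATTRIBUTE_UNUSED, void *data)
       {
         if (MEM_P (dest))
           (*(int *) data)++;
       }

       int n = 0;
       note_stores (PATTERN (insn), count_mem_stores, &n);

   after which N counts the MEM destinations stored or clobbered by INSN.  */
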
1508*e4b17023SJohn Marino /* Like note_stores, but call FUN for each expression that is being
1509*e4b17023SJohn Marino    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1510*e4b17023SJohn Marino    FUN for each expression, not any interior subexpressions.  FUN receives a
1511*e4b17023SJohn Marino    pointer to the expression and the DATA passed to this function.
1512*e4b17023SJohn Marino 
1513*e4b17023SJohn Marino    Note that this is not quite the same test as that done in reg_referenced_p
1514*e4b17023SJohn Marino    since that considers something as being referenced if it is being
1515*e4b17023SJohn Marino    partially set, while we do not.  */
1516*e4b17023SJohn Marino 
1517*e4b17023SJohn Marino void
1518*e4b17023SJohn Marino note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1519*e4b17023SJohn Marino {
1520*e4b17023SJohn Marino   rtx body = *pbody;
1521*e4b17023SJohn Marino   int i;
1522*e4b17023SJohn Marino 
1523*e4b17023SJohn Marino   switch (GET_CODE (body))
1524*e4b17023SJohn Marino     {
1525*e4b17023SJohn Marino     case COND_EXEC:
1526*e4b17023SJohn Marino       (*fun) (&COND_EXEC_TEST (body), data);
1527*e4b17023SJohn Marino       note_uses (&COND_EXEC_CODE (body), fun, data);
1528*e4b17023SJohn Marino       return;
1529*e4b17023SJohn Marino 
1530*e4b17023SJohn Marino     case PARALLEL:
1531*e4b17023SJohn Marino       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1532*e4b17023SJohn Marino 	note_uses (&XVECEXP (body, 0, i), fun, data);
1533*e4b17023SJohn Marino       return;
1534*e4b17023SJohn Marino 
1535*e4b17023SJohn Marino     case SEQUENCE:
1536*e4b17023SJohn Marino       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1537*e4b17023SJohn Marino 	note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1538*e4b17023SJohn Marino       return;
1539*e4b17023SJohn Marino 
1540*e4b17023SJohn Marino     case USE:
1541*e4b17023SJohn Marino       (*fun) (&XEXP (body, 0), data);
1542*e4b17023SJohn Marino       return;
1543*e4b17023SJohn Marino 
1544*e4b17023SJohn Marino     case ASM_OPERANDS:
1545*e4b17023SJohn Marino       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1546*e4b17023SJohn Marino 	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1547*e4b17023SJohn Marino       return;
1548*e4b17023SJohn Marino 
1549*e4b17023SJohn Marino     case TRAP_IF:
1550*e4b17023SJohn Marino       (*fun) (&TRAP_CONDITION (body), data);
1551*e4b17023SJohn Marino       return;
1552*e4b17023SJohn Marino 
1553*e4b17023SJohn Marino     case PREFETCH:
1554*e4b17023SJohn Marino       (*fun) (&XEXP (body, 0), data);
1555*e4b17023SJohn Marino       return;
1556*e4b17023SJohn Marino 
1557*e4b17023SJohn Marino     case UNSPEC:
1558*e4b17023SJohn Marino     case UNSPEC_VOLATILE:
1559*e4b17023SJohn Marino       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1560*e4b17023SJohn Marino 	(*fun) (&XVECEXP (body, 0, i), data);
1561*e4b17023SJohn Marino       return;
1562*e4b17023SJohn Marino 
1563*e4b17023SJohn Marino     case CLOBBER:
1564*e4b17023SJohn Marino       if (MEM_P (XEXP (body, 0)))
1565*e4b17023SJohn Marino 	(*fun) (&XEXP (XEXP (body, 0), 0), data);
1566*e4b17023SJohn Marino       return;
1567*e4b17023SJohn Marino 
1568*e4b17023SJohn Marino     case SET:
1569*e4b17023SJohn Marino       {
1570*e4b17023SJohn Marino 	rtx dest = SET_DEST (body);
1571*e4b17023SJohn Marino 
1572*e4b17023SJohn Marino 	/* For a SET we visit everything in the source, plus the size/position
1573*e4b17023SJohn Marino 	   operands of a ZERO_EXTRACT destination and a MEM destination's address.  */
1574*e4b17023SJohn Marino 	(*fun) (&SET_SRC (body), data);
1575*e4b17023SJohn Marino 
1576*e4b17023SJohn Marino 	if (GET_CODE (dest) == ZERO_EXTRACT)
1577*e4b17023SJohn Marino 	  {
1578*e4b17023SJohn Marino 	    (*fun) (&XEXP (dest, 1), data);
1579*e4b17023SJohn Marino 	    (*fun) (&XEXP (dest, 2), data);
1580*e4b17023SJohn Marino 	  }
1581*e4b17023SJohn Marino 
1582*e4b17023SJohn Marino 	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1583*e4b17023SJohn Marino 	  dest = XEXP (dest, 0);
1584*e4b17023SJohn Marino 
1585*e4b17023SJohn Marino 	if (MEM_P (dest))
1586*e4b17023SJohn Marino 	  (*fun) (&XEXP (dest, 0), data);
1587*e4b17023SJohn Marino       }
1588*e4b17023SJohn Marino       return;
1589*e4b17023SJohn Marino 
1590*e4b17023SJohn Marino     default:
1591*e4b17023SJohn Marino       /* All the other possibilities never store.  */
1592*e4b17023SJohn Marino       (*fun) (pbody, data);
1593*e4b17023SJohn Marino       return;
1594*e4b17023SJohn Marino     }
1595*e4b17023SJohn Marino }
1596*e4b17023SJohn Marino 
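/* Usage sketch (illustrative only; the callback name is hypothetical):
   note_uses hands the callback a pointer to each used expression, so the
   callback may examine or rewrite it in place, e.g.

       static void
       mark_top_level_reg_uses (rtx *px, void *data)
       {
         if (REG_P (*px) && REGNO (*px) < FIRST_PSEUDO_REGISTER)
           SET_HARD_REG_BIT (*(HARD_REG_SET *) data, REGNO (*px));
       }

       note_uses (&PATTERN (insn), mark_top_level_reg_uses, &used);

   Note that *PX is a whole use expression, not necessarily a REG, so a
   callback usually recurses with for_each_rtx, as record_hard_reg_uses
   does above.  */
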
1597*e4b17023SJohn Marino /* Return nonzero if X's old contents don't survive after INSN.
1598*e4b17023SJohn Marino    This will be true if X is (cc0) or if X is a register and
1599*e4b17023SJohn Marino    X dies in INSN or because INSN entirely sets X.
1600*e4b17023SJohn Marino 
1601*e4b17023SJohn Marino    "Entirely set" means set directly and not through a SUBREG or
1602*e4b17023SJohn Marino    ZERO_EXTRACT, so no trace of the old contents remains.
1603*e4b17023SJohn Marino    Likewise, REG_INC does not count.
1604*e4b17023SJohn Marino 
1605*e4b17023SJohn Marino    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1606*e4b17023SJohn Marino    but for this use that makes no difference, since regs don't overlap
1607*e4b17023SJohn Marino    during their lifetimes.  Therefore, this function may be used
1608*e4b17023SJohn Marino    at any time after deaths have been computed.
1609*e4b17023SJohn Marino 
1610*e4b17023SJohn Marino    If REG is a hard reg that occupies multiple machine registers, this
1611*e4b17023SJohn Marino    function will only return 1 if each of those registers will be replaced
1612*e4b17023SJohn Marino    by INSN.  */
1613*e4b17023SJohn Marino 
1614*e4b17023SJohn Marino int
1615*e4b17023SJohn Marino dead_or_set_p (const_rtx insn, const_rtx x)
1616*e4b17023SJohn Marino {
1617*e4b17023SJohn Marino   unsigned int regno, end_regno;
1618*e4b17023SJohn Marino   unsigned int i;
1619*e4b17023SJohn Marino 
1620*e4b17023SJohn Marino   /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1621*e4b17023SJohn Marino   if (GET_CODE (x) == CC0)
1622*e4b17023SJohn Marino     return 1;
1623*e4b17023SJohn Marino 
1624*e4b17023SJohn Marino   gcc_assert (REG_P (x));
1625*e4b17023SJohn Marino 
1626*e4b17023SJohn Marino   regno = REGNO (x);
1627*e4b17023SJohn Marino   end_regno = END_REGNO (x);
1628*e4b17023SJohn Marino   for (i = regno; i < end_regno; i++)
1629*e4b17023SJohn Marino     if (! dead_or_set_regno_p (insn, i))
1630*e4b17023SJohn Marino       return 0;
1631*e4b17023SJohn Marino 
1632*e4b17023SJohn Marino   return 1;
1633*e4b17023SJohn Marino }
1634*e4b17023SJohn Marino 
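/* Usage sketch (illustrative only): dead_or_set_p is typically consulted
   when deciding whether a register can be reused after INSN, e.g.

       if (REG_P (op) && dead_or_set_p (insn, op))
         ...  the old contents of OP are not needed after INSN ...

   For a multi-word hard register this only succeeds when every constituent
   register is dead or entirely set, per the comment above.  */
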
1635*e4b17023SJohn Marino /* Return TRUE iff DEST is a register or subreg of a register and
1636*e4b17023SJohn Marino    doesn't change the number of words of the inner register, and any
1637*e4b17023SJohn Marino    part of the register is TEST_REGNO.  */
1638*e4b17023SJohn Marino 
1639*e4b17023SJohn Marino static bool
1640*e4b17023SJohn Marino covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1641*e4b17023SJohn Marino {
1642*e4b17023SJohn Marino   unsigned int regno, endregno;
1643*e4b17023SJohn Marino 
1644*e4b17023SJohn Marino   if (GET_CODE (dest) == SUBREG
1645*e4b17023SJohn Marino       && (((GET_MODE_SIZE (GET_MODE (dest))
1646*e4b17023SJohn Marino 	    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1647*e4b17023SJohn Marino 	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1648*e4b17023SJohn Marino 	       + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1649*e4b17023SJohn Marino     dest = SUBREG_REG (dest);
1650*e4b17023SJohn Marino 
1651*e4b17023SJohn Marino   if (!REG_P (dest))
1652*e4b17023SJohn Marino     return false;
1653*e4b17023SJohn Marino 
1654*e4b17023SJohn Marino   regno = REGNO (dest);
1655*e4b17023SJohn Marino   endregno = END_REGNO (dest);
1656*e4b17023SJohn Marino   return (test_regno >= regno && test_regno < endregno);
1657*e4b17023SJohn Marino }
1658*e4b17023SJohn Marino 
1659*e4b17023SJohn Marino /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1660*e4b17023SJohn Marino    any member matches the covers_regno_no_parallel_p criteria.  */
1661*e4b17023SJohn Marino 
1662*e4b17023SJohn Marino static bool
1663*e4b17023SJohn Marino covers_regno_p (const_rtx dest, unsigned int test_regno)
1664*e4b17023SJohn Marino {
1665*e4b17023SJohn Marino   if (GET_CODE (dest) == PARALLEL)
1666*e4b17023SJohn Marino     {
1667*e4b17023SJohn Marino       /* Some targets place small structures in registers for return
1668*e4b17023SJohn Marino 	 values of functions, and those registers are wrapped in
1669*e4b17023SJohn Marino 	 PARALLELs that we may see as the destination of a SET.  */
1670*e4b17023SJohn Marino       int i;
1671*e4b17023SJohn Marino 
1672*e4b17023SJohn Marino       for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1673*e4b17023SJohn Marino 	{
1674*e4b17023SJohn Marino 	  rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1675*e4b17023SJohn Marino 	  if (inner != NULL_RTX
1676*e4b17023SJohn Marino 	      && covers_regno_no_parallel_p (inner, test_regno))
1677*e4b17023SJohn Marino 	    return true;
1678*e4b17023SJohn Marino 	}
1679*e4b17023SJohn Marino 
1680*e4b17023SJohn Marino       return false;
1681*e4b17023SJohn Marino     }
1682*e4b17023SJohn Marino   else
1683*e4b17023SJohn Marino     return covers_regno_no_parallel_p (dest, test_regno);
1684*e4b17023SJohn Marino }
1685*e4b17023SJohn Marino 
1686*e4b17023SJohn Marino /* Utility function for dead_or_set_p to check an individual register. */
1687*e4b17023SJohn Marino 
1688*e4b17023SJohn Marino int
1689*e4b17023SJohn Marino dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1690*e4b17023SJohn Marino {
1691*e4b17023SJohn Marino   const_rtx pattern;
1692*e4b17023SJohn Marino 
1693*e4b17023SJohn Marino   /* See if there is a death note for something that includes TEST_REGNO.  */
1694*e4b17023SJohn Marino   if (find_regno_note (insn, REG_DEAD, test_regno))
1695*e4b17023SJohn Marino     return 1;
1696*e4b17023SJohn Marino 
1697*e4b17023SJohn Marino   if (CALL_P (insn)
1698*e4b17023SJohn Marino       && find_regno_fusage (insn, CLOBBER, test_regno))
1699*e4b17023SJohn Marino     return 1;
1700*e4b17023SJohn Marino 
1701*e4b17023SJohn Marino   pattern = PATTERN (insn);
1702*e4b17023SJohn Marino 
1703*e4b17023SJohn Marino   if (GET_CODE (pattern) == COND_EXEC)
1704*e4b17023SJohn Marino     pattern = COND_EXEC_CODE (pattern);
1705*e4b17023SJohn Marino 
1706*e4b17023SJohn Marino   if (GET_CODE (pattern) == SET)
1707*e4b17023SJohn Marino     return covers_regno_p (SET_DEST (pattern), test_regno);
1708*e4b17023SJohn Marino   else if (GET_CODE (pattern) == PARALLEL)
1709*e4b17023SJohn Marino     {
1710*e4b17023SJohn Marino       int i;
1711*e4b17023SJohn Marino 
1712*e4b17023SJohn Marino       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1713*e4b17023SJohn Marino 	{
1714*e4b17023SJohn Marino 	  rtx body = XVECEXP (pattern, 0, i);
1715*e4b17023SJohn Marino 
1716*e4b17023SJohn Marino 	  if (GET_CODE (body) == COND_EXEC)
1717*e4b17023SJohn Marino 	    body = COND_EXEC_CODE (body);
1718*e4b17023SJohn Marino 
1719*e4b17023SJohn Marino 	  if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1720*e4b17023SJohn Marino 	      && covers_regno_p (SET_DEST (body), test_regno))
1721*e4b17023SJohn Marino 	    return 1;
1722*e4b17023SJohn Marino 	}
1723*e4b17023SJohn Marino     }
1724*e4b17023SJohn Marino 
1725*e4b17023SJohn Marino   return 0;
1726*e4b17023SJohn Marino }
1727*e4b17023SJohn Marino 
1728*e4b17023SJohn Marino /* Return the reg-note of kind KIND in insn INSN, if there is one.
1729*e4b17023SJohn Marino    If DATUM is nonzero, look for one whose datum is DATUM.  */
1730*e4b17023SJohn Marino 
1731*e4b17023SJohn Marino rtx
1732*e4b17023SJohn Marino find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1733*e4b17023SJohn Marino {
1734*e4b17023SJohn Marino   rtx link;
1735*e4b17023SJohn Marino 
1736*e4b17023SJohn Marino   gcc_checking_assert (insn);
1737*e4b17023SJohn Marino 
1738*e4b17023SJohn Marino   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1739*e4b17023SJohn Marino   if (! INSN_P (insn))
1740*e4b17023SJohn Marino     return 0;
1741*e4b17023SJohn Marino   if (datum == 0)
1742*e4b17023SJohn Marino     {
1743*e4b17023SJohn Marino       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1744*e4b17023SJohn Marino 	if (REG_NOTE_KIND (link) == kind)
1745*e4b17023SJohn Marino 	  return link;
1746*e4b17023SJohn Marino       return 0;
1747*e4b17023SJohn Marino     }
1748*e4b17023SJohn Marino 
1749*e4b17023SJohn Marino   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1750*e4b17023SJohn Marino     if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1751*e4b17023SJohn Marino       return link;
1752*e4b17023SJohn Marino   return 0;
1753*e4b17023SJohn Marino }
1754*e4b17023SJohn Marino 
1755*e4b17023SJohn Marino /* Return the reg-note of kind KIND in insn INSN which applies to register
1756*e4b17023SJohn Marino    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1757*e4b17023SJohn Marino    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1758*e4b17023SJohn Marino    it might be the case that the note overlaps REGNO.  */
1759*e4b17023SJohn Marino 
1760*e4b17023SJohn Marino rtx
1761*e4b17023SJohn Marino find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1762*e4b17023SJohn Marino {
1763*e4b17023SJohn Marino   rtx link;
1764*e4b17023SJohn Marino 
1765*e4b17023SJohn Marino   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1766*e4b17023SJohn Marino   if (! INSN_P (insn))
1767*e4b17023SJohn Marino     return 0;
1768*e4b17023SJohn Marino 
1769*e4b17023SJohn Marino   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1770*e4b17023SJohn Marino     if (REG_NOTE_KIND (link) == kind
1771*e4b17023SJohn Marino 	/* Verify that it is a register, so that scratch and MEM won't cause a
1772*e4b17023SJohn Marino 	   problem here.  */
1773*e4b17023SJohn Marino 	&& REG_P (XEXP (link, 0))
1774*e4b17023SJohn Marino 	&& REGNO (XEXP (link, 0)) <= regno
1775*e4b17023SJohn Marino 	&& END_REGNO (XEXP (link, 0)) > regno)
1776*e4b17023SJohn Marino       return link;
1777*e4b17023SJohn Marino   return 0;
1778*e4b17023SJohn Marino }
1779*e4b17023SJohn Marino 
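/* Usage sketch (illustrative only): because the note's register may be a
   multi-word hard register that merely overlaps REGNO, the usual idiom is

       if (find_regno_note (insn, REG_DEAD, regno))
         ...  some register covering REGNO dies in INSN ...

   rather than comparing REGNO (XEXP (note, 0)) against REGNO directly.  */
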
1780*e4b17023SJohn Marino /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1781*e4b17023SJohn Marino    has such a note.  */
1782*e4b17023SJohn Marino 
1783*e4b17023SJohn Marino rtx
1784*e4b17023SJohn Marino find_reg_equal_equiv_note (const_rtx insn)
1785*e4b17023SJohn Marino {
1786*e4b17023SJohn Marino   rtx link;
1787*e4b17023SJohn Marino 
1788*e4b17023SJohn Marino   if (!INSN_P (insn))
1789*e4b17023SJohn Marino     return 0;
1790*e4b17023SJohn Marino 
1791*e4b17023SJohn Marino   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1792*e4b17023SJohn Marino     if (REG_NOTE_KIND (link) == REG_EQUAL
1793*e4b17023SJohn Marino 	|| REG_NOTE_KIND (link) == REG_EQUIV)
1794*e4b17023SJohn Marino       {
1795*e4b17023SJohn Marino 	/* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1796*e4b17023SJohn Marino 	   insns that have multiple sets.  Checking single_set to
1797*e4b17023SJohn Marino 	   make sure of this is not the proper check, as explained
1798*e4b17023SJohn Marino 	   in the comment in set_unique_reg_note.
1799*e4b17023SJohn Marino 
1800*e4b17023SJohn Marino 	   This should be changed into an assert.  */
1801*e4b17023SJohn Marino 	if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1802*e4b17023SJohn Marino 	  return 0;
1803*e4b17023SJohn Marino 	return link;
1804*e4b17023SJohn Marino       }
1805*e4b17023SJohn Marino   return NULL;
1806*e4b17023SJohn Marino }
1807*e4b17023SJohn Marino 
1808*e4b17023SJohn Marino /* Check whether INSN is a single_set whose source is known to be
1809*e4b17023SJohn Marino    equivalent to a constant.  Return that constant if so, otherwise
1810*e4b17023SJohn Marino    return null.  */
1811*e4b17023SJohn Marino 
1812*e4b17023SJohn Marino rtx
1813*e4b17023SJohn Marino find_constant_src (const_rtx insn)
1814*e4b17023SJohn Marino {
1815*e4b17023SJohn Marino   rtx note, set, x;
1816*e4b17023SJohn Marino 
1817*e4b17023SJohn Marino   set = single_set (insn);
1818*e4b17023SJohn Marino   if (set)
1819*e4b17023SJohn Marino     {
1820*e4b17023SJohn Marino       x = avoid_constant_pool_reference (SET_SRC (set));
1821*e4b17023SJohn Marino       if (CONSTANT_P (x))
1822*e4b17023SJohn Marino 	return x;
1823*e4b17023SJohn Marino     }
1824*e4b17023SJohn Marino 
1825*e4b17023SJohn Marino   note = find_reg_equal_equiv_note (insn);
1826*e4b17023SJohn Marino   if (note && CONSTANT_P (XEXP (note, 0)))
1827*e4b17023SJohn Marino     return XEXP (note, 0);
1828*e4b17023SJohn Marino 
1829*e4b17023SJohn Marino   return NULL_RTX;
1830*e4b17023SJohn Marino }
1831*e4b17023SJohn Marino 
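/* Usage sketch (illustrative only): find_constant_src is a convenient way
   to ask "does INSN just materialize a known constant?", e.g.

       rtx cst = find_constant_src (insn);
       if (cst && CONST_INT_P (cst))
         ...  INSN's single_set destination is known to equal INTVAL (cst) ...

   which covers both literal constant sources and sources proven constant
   via a REG_EQUAL/REG_EQUIV note.  */
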
1832*e4b17023SJohn Marino /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1833*e4b17023SJohn Marino    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1834*e4b17023SJohn Marino 
1835*e4b17023SJohn Marino int
1836*e4b17023SJohn Marino find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1837*e4b17023SJohn Marino {
1838*e4b17023SJohn Marino   /* If it's not a CALL_INSN, it can't possibly have a
1839*e4b17023SJohn Marino      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1840*e4b17023SJohn Marino   if (!CALL_P (insn))
1841*e4b17023SJohn Marino     return 0;
1842*e4b17023SJohn Marino 
1843*e4b17023SJohn Marino   gcc_assert (datum);
1844*e4b17023SJohn Marino 
1845*e4b17023SJohn Marino   if (!REG_P (datum))
1846*e4b17023SJohn Marino     {
1847*e4b17023SJohn Marino       rtx link;
1848*e4b17023SJohn Marino 
1849*e4b17023SJohn Marino       for (link = CALL_INSN_FUNCTION_USAGE (insn);
1850*e4b17023SJohn Marino 	   link;
1851*e4b17023SJohn Marino 	   link = XEXP (link, 1))
1852*e4b17023SJohn Marino 	if (GET_CODE (XEXP (link, 0)) == code
1853*e4b17023SJohn Marino 	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1854*e4b17023SJohn Marino 	  return 1;
1855*e4b17023SJohn Marino     }
1856*e4b17023SJohn Marino   else
1857*e4b17023SJohn Marino     {
1858*e4b17023SJohn Marino       unsigned int regno = REGNO (datum);
1859*e4b17023SJohn Marino 
1860*e4b17023SJohn Marino       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1861*e4b17023SJohn Marino 	 to pseudo registers, so don't bother checking.  */
1862*e4b17023SJohn Marino 
1863*e4b17023SJohn Marino       if (regno < FIRST_PSEUDO_REGISTER)
1864*e4b17023SJohn Marino 	{
1865*e4b17023SJohn Marino 	  unsigned int end_regno = END_HARD_REGNO (datum);
1866*e4b17023SJohn Marino 	  unsigned int i;
1867*e4b17023SJohn Marino 
1868*e4b17023SJohn Marino 	  for (i = regno; i < end_regno; i++)
1869*e4b17023SJohn Marino 	    if (find_regno_fusage (insn, code, i))
1870*e4b17023SJohn Marino 	      return 1;
1871*e4b17023SJohn Marino 	}
1872*e4b17023SJohn Marino     }
1873*e4b17023SJohn Marino 
1874*e4b17023SJohn Marino   return 0;
1875*e4b17023SJohn Marino }
1876*e4b17023SJohn Marino 
1877*e4b17023SJohn Marino /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1878*e4b17023SJohn Marino    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1879*e4b17023SJohn Marino 
1880*e4b17023SJohn Marino int
1881*e4b17023SJohn Marino find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1882*e4b17023SJohn Marino {
1883*e4b17023SJohn Marino   rtx link;
1884*e4b17023SJohn Marino 
1885*e4b17023SJohn Marino   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1886*e4b17023SJohn Marino      to pseudo registers, so don't bother checking.  */
1887*e4b17023SJohn Marino 
1888*e4b17023SJohn Marino   if (regno >= FIRST_PSEUDO_REGISTER
1889*e4b17023SJohn Marino       || !CALL_P (insn) )
1890*e4b17023SJohn Marino     return 0;
1891*e4b17023SJohn Marino 
1892*e4b17023SJohn Marino   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1893*e4b17023SJohn Marino     {
1894*e4b17023SJohn Marino       rtx op, reg;
1895*e4b17023SJohn Marino 
1896*e4b17023SJohn Marino       if (GET_CODE (op = XEXP (link, 0)) == code
1897*e4b17023SJohn Marino 	  && REG_P (reg = XEXP (op, 0))
1898*e4b17023SJohn Marino 	  && REGNO (reg) <= regno
1899*e4b17023SJohn Marino 	  && END_HARD_REGNO (reg) > regno)
1900*e4b17023SJohn Marino 	return 1;
1901*e4b17023SJohn Marino     }
1902*e4b17023SJohn Marino 
1903*e4b17023SJohn Marino   return 0;
1904*e4b17023SJohn Marino }
1905*e4b17023SJohn Marino 
1906*e4b17023SJohn Marino 
1907*e4b17023SJohn Marino /* Allocate a register note with kind KIND and datum DATUM.  LIST is
1908*e4b17023SJohn Marino    stored as the pointer to the next register note.  */
1909*e4b17023SJohn Marino 
1910*e4b17023SJohn Marino rtx
1911*e4b17023SJohn Marino alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
1912*e4b17023SJohn Marino {
1913*e4b17023SJohn Marino   rtx note;
1914*e4b17023SJohn Marino 
1915*e4b17023SJohn Marino   switch (kind)
1916*e4b17023SJohn Marino     {
1917*e4b17023SJohn Marino     case REG_CC_SETTER:
1918*e4b17023SJohn Marino     case REG_CC_USER:
1919*e4b17023SJohn Marino     case REG_LABEL_TARGET:
1920*e4b17023SJohn Marino     case REG_LABEL_OPERAND:
1921*e4b17023SJohn Marino     case REG_TM:
1922*e4b17023SJohn Marino       /* These types of register notes use an INSN_LIST rather than an
1923*e4b17023SJohn Marino 	 EXPR_LIST, so that copying is done right and dumps look
1924*e4b17023SJohn Marino 	 better.  */
1925*e4b17023SJohn Marino       note = alloc_INSN_LIST (datum, list);
1926*e4b17023SJohn Marino       PUT_REG_NOTE_KIND (note, kind);
1927*e4b17023SJohn Marino       break;
1928*e4b17023SJohn Marino 
1929*e4b17023SJohn Marino     default:
1930*e4b17023SJohn Marino       note = alloc_EXPR_LIST (kind, datum, list);
1931*e4b17023SJohn Marino       break;
1932*e4b17023SJohn Marino     }
1933*e4b17023SJohn Marino 
1934*e4b17023SJohn Marino   return note;
1935*e4b17023SJohn Marino }
1936*e4b17023SJohn Marino 
1937*e4b17023SJohn Marino /* Add register note with kind KIND and datum DATUM to INSN.  */
1938*e4b17023SJohn Marino 
1939*e4b17023SJohn Marino void
1940*e4b17023SJohn Marino add_reg_note (rtx insn, enum reg_note kind, rtx datum)
1941*e4b17023SJohn Marino {
1942*e4b17023SJohn Marino   REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
1943*e4b17023SJohn Marino }
1944*e4b17023SJohn Marino 
1945*e4b17023SJohn Marino /* Remove register note NOTE from the REG_NOTES of INSN.  */
1946*e4b17023SJohn Marino 
1947*e4b17023SJohn Marino void
1948*e4b17023SJohn Marino remove_note (rtx insn, const_rtx note)
1949*e4b17023SJohn Marino {
1950*e4b17023SJohn Marino   rtx link;
1951*e4b17023SJohn Marino 
1952*e4b17023SJohn Marino   if (note == NULL_RTX)
1953*e4b17023SJohn Marino     return;
1954*e4b17023SJohn Marino 
1955*e4b17023SJohn Marino   if (REG_NOTES (insn) == note)
1956*e4b17023SJohn Marino     REG_NOTES (insn) = XEXP (note, 1);
1957*e4b17023SJohn Marino   else
1958*e4b17023SJohn Marino     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1959*e4b17023SJohn Marino       if (XEXP (link, 1) == note)
1960*e4b17023SJohn Marino 	{
1961*e4b17023SJohn Marino 	  XEXP (link, 1) = XEXP (note, 1);
1962*e4b17023SJohn Marino 	  break;
1963*e4b17023SJohn Marino 	}
1964*e4b17023SJohn Marino 
1965*e4b17023SJohn Marino   switch (REG_NOTE_KIND (note))
1966*e4b17023SJohn Marino     {
1967*e4b17023SJohn Marino     case REG_EQUAL:
1968*e4b17023SJohn Marino     case REG_EQUIV:
1969*e4b17023SJohn Marino       df_notes_rescan (insn);
1970*e4b17023SJohn Marino       break;
1971*e4b17023SJohn Marino     default:
1972*e4b17023SJohn Marino       break;
1973*e4b17023SJohn Marino     }
1974*e4b17023SJohn Marino }
1975*e4b17023SJohn Marino 
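/* Usage sketch (illustrative only): reg notes are built and torn down with
   the helpers above, e.g.

       add_reg_note (insn, REG_EQUAL, GEN_INT (42));
       ...
       remove_note (insn, find_reg_note (insn, REG_EQUAL, NULL_RTX));

   remove_note tolerates a NULL note, so the find/remove pair is safe even
   when no such note exists, and it rescans df for REG_EQUAL/REG_EQUIV.  */
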
1976*e4b17023SJohn Marino /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */
1977*e4b17023SJohn Marino 
1978*e4b17023SJohn Marino void
1979*e4b17023SJohn Marino remove_reg_equal_equiv_notes (rtx insn)
1980*e4b17023SJohn Marino {
1981*e4b17023SJohn Marino   rtx *loc;
1982*e4b17023SJohn Marino 
1983*e4b17023SJohn Marino   loc = &REG_NOTES (insn);
1984*e4b17023SJohn Marino   while (*loc)
1985*e4b17023SJohn Marino     {
1986*e4b17023SJohn Marino       enum reg_note kind = REG_NOTE_KIND (*loc);
1987*e4b17023SJohn Marino       if (kind == REG_EQUAL || kind == REG_EQUIV)
1988*e4b17023SJohn Marino 	*loc = XEXP (*loc, 1);
1989*e4b17023SJohn Marino       else
1990*e4b17023SJohn Marino 	loc = &XEXP (*loc, 1);
1991*e4b17023SJohn Marino     }
1992*e4b17023SJohn Marino }
1993*e4b17023SJohn Marino 
1994*e4b17023SJohn Marino /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.  */
1995*e4b17023SJohn Marino 
1996*e4b17023SJohn Marino void
1997*e4b17023SJohn Marino remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
1998*e4b17023SJohn Marino {
1999*e4b17023SJohn Marino   df_ref eq_use;
2000*e4b17023SJohn Marino 
2001*e4b17023SJohn Marino   if (!df)
2002*e4b17023SJohn Marino     return;
2003*e4b17023SJohn Marino 
2004*e4b17023SJohn Marino   /* This loop is a little tricky.  We cannot just go down the chain because
2005*e4b17023SJohn Marino      it is being modified by some actions in the loop.  So we just iterate
2006*e4b17023SJohn Marino      over the head.  We plan to drain the list anyway.  */
2007*e4b17023SJohn Marino   while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2008*e4b17023SJohn Marino     {
2009*e4b17023SJohn Marino       rtx insn = DF_REF_INSN (eq_use);
2010*e4b17023SJohn Marino       rtx note = find_reg_equal_equiv_note (insn);
2011*e4b17023SJohn Marino 
2012*e4b17023SJohn Marino       /* This assert is generally triggered when someone deletes a REG_EQUAL
2013*e4b17023SJohn Marino 	 or REG_EQUIV note by hacking the list manually rather than calling
2014*e4b17023SJohn Marino 	 remove_note.  */
2015*e4b17023SJohn Marino       gcc_assert (note);
2016*e4b17023SJohn Marino 
2017*e4b17023SJohn Marino       remove_note (insn, note);
2018*e4b17023SJohn Marino     }
2019*e4b17023SJohn Marino }
2020*e4b17023SJohn Marino 
2021*e4b17023SJohn Marino /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2022*e4b17023SJohn Marino    return 1 if it is found.  A simple equality test is used to determine if
2023*e4b17023SJohn Marino    NODE matches.  */
2024*e4b17023SJohn Marino 
2025*e4b17023SJohn Marino int
2026*e4b17023SJohn Marino in_expr_list_p (const_rtx listp, const_rtx node)
2027*e4b17023SJohn Marino {
2028*e4b17023SJohn Marino   const_rtx x;
2029*e4b17023SJohn Marino 
2030*e4b17023SJohn Marino   for (x = listp; x; x = XEXP (x, 1))
2031*e4b17023SJohn Marino     if (node == XEXP (x, 0))
2032*e4b17023SJohn Marino       return 1;
2033*e4b17023SJohn Marino 
2034*e4b17023SJohn Marino   return 0;
2035*e4b17023SJohn Marino }
2036*e4b17023SJohn Marino 
2037*e4b17023SJohn Marino /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2038*e4b17023SJohn Marino    remove that entry from the list if it is found.
2039*e4b17023SJohn Marino 
2040*e4b17023SJohn Marino    A simple equality test is used to determine if NODE matches.  */
2041*e4b17023SJohn Marino 
2042*e4b17023SJohn Marino void
2043*e4b17023SJohn Marino remove_node_from_expr_list (const_rtx node, rtx *listp)
2044*e4b17023SJohn Marino {
2045*e4b17023SJohn Marino   rtx temp = *listp;
2046*e4b17023SJohn Marino   rtx prev = NULL_RTX;
2047*e4b17023SJohn Marino 
2048*e4b17023SJohn Marino   while (temp)
2049*e4b17023SJohn Marino     {
2050*e4b17023SJohn Marino       if (node == XEXP (temp, 0))
2051*e4b17023SJohn Marino 	{
2052*e4b17023SJohn Marino 	  /* Splice the node out of the list.  */
2053*e4b17023SJohn Marino 	  if (prev)
2054*e4b17023SJohn Marino 	    XEXP (prev, 1) = XEXP (temp, 1);
2055*e4b17023SJohn Marino 	  else
2056*e4b17023SJohn Marino 	    *listp = XEXP (temp, 1);
2057*e4b17023SJohn Marino 
2058*e4b17023SJohn Marino 	  return;
2059*e4b17023SJohn Marino 	}
2060*e4b17023SJohn Marino 
2061*e4b17023SJohn Marino       prev = temp;
2062*e4b17023SJohn Marino       temp = XEXP (temp, 1);
2063*e4b17023SJohn Marino     }
2064*e4b17023SJohn Marino }
2065*e4b17023SJohn Marino 
2066*e4b17023SJohn Marino /* Nonzero if X contains any volatile instructions.  These are instructions
2067*e4b17023SJohn Marino    which may cause unpredictable machine state, and thus no other
2068*e4b17023SJohn Marino    instructions should be moved or combined across them.  This includes
2069*e4b17023SJohn Marino    only volatile asms and UNSPEC_VOLATILE instructions.  */
2070*e4b17023SJohn Marino 
2071*e4b17023SJohn Marino int
2072*e4b17023SJohn Marino volatile_insn_p (const_rtx x)
2073*e4b17023SJohn Marino {
2074*e4b17023SJohn Marino   const RTX_CODE code = GET_CODE (x);
2075*e4b17023SJohn Marino   switch (code)
2076*e4b17023SJohn Marino     {
2077*e4b17023SJohn Marino     case LABEL_REF:
2078*e4b17023SJohn Marino     case SYMBOL_REF:
2079*e4b17023SJohn Marino     case CONST_INT:
2080*e4b17023SJohn Marino     case CONST:
2081*e4b17023SJohn Marino     case CONST_DOUBLE:
2082*e4b17023SJohn Marino     case CONST_FIXED:
2083*e4b17023SJohn Marino     case CONST_VECTOR:
2084*e4b17023SJohn Marino     case CC0:
2085*e4b17023SJohn Marino     case PC:
2086*e4b17023SJohn Marino     case REG:
2087*e4b17023SJohn Marino     case SCRATCH:
2088*e4b17023SJohn Marino     case CLOBBER:
2089*e4b17023SJohn Marino     case ADDR_VEC:
2090*e4b17023SJohn Marino     case ADDR_DIFF_VEC:
2091*e4b17023SJohn Marino     case CALL:
2092*e4b17023SJohn Marino     case MEM:
2093*e4b17023SJohn Marino       return 0;
2094*e4b17023SJohn Marino 
2095*e4b17023SJohn Marino     case UNSPEC_VOLATILE:
2096*e4b17023SJohn Marino  /* case TRAP_IF: This isn't clear yet.  */
2097*e4b17023SJohn Marino       return 1;
2098*e4b17023SJohn Marino 
2099*e4b17023SJohn Marino     case ASM_INPUT:
2100*e4b17023SJohn Marino     case ASM_OPERANDS:
2101*e4b17023SJohn Marino       if (MEM_VOLATILE_P (x))
2102*e4b17023SJohn Marino 	return 1;
2103*e4b17023SJohn Marino 
2104*e4b17023SJohn Marino     default:
2105*e4b17023SJohn Marino       break;
2106*e4b17023SJohn Marino     }
2107*e4b17023SJohn Marino 
2108*e4b17023SJohn Marino   /* Recursively scan the operands of this expression.  */
2109*e4b17023SJohn Marino 
2110*e4b17023SJohn Marino   {
2111*e4b17023SJohn Marino     const char *const fmt = GET_RTX_FORMAT (code);
2112*e4b17023SJohn Marino     int i;
2113*e4b17023SJohn Marino 
2114*e4b17023SJohn Marino     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2115*e4b17023SJohn Marino       {
2116*e4b17023SJohn Marino 	if (fmt[i] == 'e')
2117*e4b17023SJohn Marino 	  {
2118*e4b17023SJohn Marino 	    if (volatile_insn_p (XEXP (x, i)))
2119*e4b17023SJohn Marino 	      return 1;
2120*e4b17023SJohn Marino 	  }
2121*e4b17023SJohn Marino 	else if (fmt[i] == 'E')
2122*e4b17023SJohn Marino 	  {
2123*e4b17023SJohn Marino 	    int j;
2124*e4b17023SJohn Marino 	    for (j = 0; j < XVECLEN (x, i); j++)
2125*e4b17023SJohn Marino 	      if (volatile_insn_p (XVECEXP (x, i, j)))
2126*e4b17023SJohn Marino 		return 1;
2127*e4b17023SJohn Marino 	  }
2128*e4b17023SJohn Marino       }
2129*e4b17023SJohn Marino   }
2130*e4b17023SJohn Marino   return 0;
2131*e4b17023SJohn Marino }
2132*e4b17023SJohn Marino 
2133*e4b17023SJohn Marino /* Nonzero if X contains any volatile memory references
2134*e4b17023SJohn Marino    UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
2135*e4b17023SJohn Marino 
2136*e4b17023SJohn Marino int
2137*e4b17023SJohn Marino volatile_refs_p (const_rtx x)
2138*e4b17023SJohn Marino {
2139*e4b17023SJohn Marino   const RTX_CODE code = GET_CODE (x);
2140*e4b17023SJohn Marino   switch (code)
2141*e4b17023SJohn Marino     {
2142*e4b17023SJohn Marino     case LABEL_REF:
2143*e4b17023SJohn Marino     case SYMBOL_REF:
2144*e4b17023SJohn Marino     case CONST_INT:
2145*e4b17023SJohn Marino     case CONST:
2146*e4b17023SJohn Marino     case CONST_DOUBLE:
2147*e4b17023SJohn Marino     case CONST_FIXED:
2148*e4b17023SJohn Marino     case CONST_VECTOR:
2149*e4b17023SJohn Marino     case CC0:
2150*e4b17023SJohn Marino     case PC:
2151*e4b17023SJohn Marino     case REG:
2152*e4b17023SJohn Marino     case SCRATCH:
2153*e4b17023SJohn Marino     case CLOBBER:
2154*e4b17023SJohn Marino     case ADDR_VEC:
2155*e4b17023SJohn Marino     case ADDR_DIFF_VEC:
2156*e4b17023SJohn Marino       return 0;
2157*e4b17023SJohn Marino 
2158*e4b17023SJohn Marino     case UNSPEC_VOLATILE:
2159*e4b17023SJohn Marino       return 1;
2160*e4b17023SJohn Marino 
2161*e4b17023SJohn Marino     case MEM:
2162*e4b17023SJohn Marino     case ASM_INPUT:
2163*e4b17023SJohn Marino     case ASM_OPERANDS:
2164*e4b17023SJohn Marino       if (MEM_VOLATILE_P (x))
2165*e4b17023SJohn Marino 	return 1;
2166*e4b17023SJohn Marino 
2167*e4b17023SJohn Marino     default:
2168*e4b17023SJohn Marino       break;
2169*e4b17023SJohn Marino     }
2170*e4b17023SJohn Marino 
2171*e4b17023SJohn Marino   /* Recursively scan the operands of this expression.  */
2172*e4b17023SJohn Marino 
2173*e4b17023SJohn Marino   {
2174*e4b17023SJohn Marino     const char *const fmt = GET_RTX_FORMAT (code);
2175*e4b17023SJohn Marino     int i;
2176*e4b17023SJohn Marino 
2177*e4b17023SJohn Marino     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2178*e4b17023SJohn Marino       {
2179*e4b17023SJohn Marino 	if (fmt[i] == 'e')
2180*e4b17023SJohn Marino 	  {
2181*e4b17023SJohn Marino 	    if (volatile_refs_p (XEXP (x, i)))
2182*e4b17023SJohn Marino 	      return 1;
2183*e4b17023SJohn Marino 	  }
2184*e4b17023SJohn Marino 	else if (fmt[i] == 'E')
2185*e4b17023SJohn Marino 	  {
2186*e4b17023SJohn Marino 	    int j;
2187*e4b17023SJohn Marino 	    for (j = 0; j < XVECLEN (x, i); j++)
2188*e4b17023SJohn Marino 	      if (volatile_refs_p (XVECEXP (x, i, j)))
2189*e4b17023SJohn Marino 		return 1;
2190*e4b17023SJohn Marino 	  }
2191*e4b17023SJohn Marino       }
2192*e4b17023SJohn Marino   }
2193*e4b17023SJohn Marino   return 0;
2194*e4b17023SJohn Marino }
2195*e4b17023SJohn Marino 
2196*e4b17023SJohn Marino /* Similar to above, except that it also rejects register pre- and post-
2197*e4b17023SJohn Marino    incrementing and decrementing, CALLs, and CLOBBERs with a non-VOID mode.  */
2198*e4b17023SJohn Marino 
2199*e4b17023SJohn Marino int
2200*e4b17023SJohn Marino side_effects_p (const_rtx x)
2201*e4b17023SJohn Marino {
2202*e4b17023SJohn Marino   const RTX_CODE code = GET_CODE (x);
2203*e4b17023SJohn Marino   switch (code)
2204*e4b17023SJohn Marino     {
2205*e4b17023SJohn Marino     case LABEL_REF:
2206*e4b17023SJohn Marino     case SYMBOL_REF:
2207*e4b17023SJohn Marino     case CONST_INT:
2208*e4b17023SJohn Marino     case CONST:
2209*e4b17023SJohn Marino     case CONST_DOUBLE:
2210*e4b17023SJohn Marino     case CONST_FIXED:
2211*e4b17023SJohn Marino     case CONST_VECTOR:
2212*e4b17023SJohn Marino     case CC0:
2213*e4b17023SJohn Marino     case PC:
2214*e4b17023SJohn Marino     case REG:
2215*e4b17023SJohn Marino     case SCRATCH:
2216*e4b17023SJohn Marino     case ADDR_VEC:
2217*e4b17023SJohn Marino     case ADDR_DIFF_VEC:
2218*e4b17023SJohn Marino     case VAR_LOCATION:
2219*e4b17023SJohn Marino       return 0;
2220*e4b17023SJohn Marino 
2221*e4b17023SJohn Marino     case CLOBBER:
2222*e4b17023SJohn Marino       /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
2223*e4b17023SJohn Marino 	 when some combination can't be done.  If we see one, don't think
2224*e4b17023SJohn Marino 	 that we can simplify the expression.  */
2225*e4b17023SJohn Marino       return (GET_MODE (x) != VOIDmode);
2226*e4b17023SJohn Marino 
2227*e4b17023SJohn Marino     case PRE_INC:
2228*e4b17023SJohn Marino     case PRE_DEC:
2229*e4b17023SJohn Marino     case POST_INC:
2230*e4b17023SJohn Marino     case POST_DEC:
2231*e4b17023SJohn Marino     case PRE_MODIFY:
2232*e4b17023SJohn Marino     case POST_MODIFY:
2233*e4b17023SJohn Marino     case CALL:
2234*e4b17023SJohn Marino     case UNSPEC_VOLATILE:
2235*e4b17023SJohn Marino  /* case TRAP_IF: This isn't clear yet.  */
2236*e4b17023SJohn Marino       return 1;
2237*e4b17023SJohn Marino 
2238*e4b17023SJohn Marino     case MEM:
2239*e4b17023SJohn Marino     case ASM_INPUT:
2240*e4b17023SJohn Marino     case ASM_OPERANDS:
2241*e4b17023SJohn Marino       if (MEM_VOLATILE_P (x))
2242*e4b17023SJohn Marino 	return 1;
2243*e4b17023SJohn Marino 
2244*e4b17023SJohn Marino     default:
2245*e4b17023SJohn Marino       break;
2246*e4b17023SJohn Marino     }
2247*e4b17023SJohn Marino 
2248*e4b17023SJohn Marino   /* Recursively scan the operands of this expression.  */
2249*e4b17023SJohn Marino 
2250*e4b17023SJohn Marino   {
2251*e4b17023SJohn Marino     const char *fmt = GET_RTX_FORMAT (code);
2252*e4b17023SJohn Marino     int i;
2253*e4b17023SJohn Marino 
2254*e4b17023SJohn Marino     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2255*e4b17023SJohn Marino       {
2256*e4b17023SJohn Marino 	if (fmt[i] == 'e')
2257*e4b17023SJohn Marino 	  {
2258*e4b17023SJohn Marino 	    if (side_effects_p (XEXP (x, i)))
2259*e4b17023SJohn Marino 	      return 1;
2260*e4b17023SJohn Marino 	  }
2261*e4b17023SJohn Marino 	else if (fmt[i] == 'E')
2262*e4b17023SJohn Marino 	  {
2263*e4b17023SJohn Marino 	    int j;
2264*e4b17023SJohn Marino 	    for (j = 0; j < XVECLEN (x, i); j++)
2265*e4b17023SJohn Marino 	      if (side_effects_p (XVECEXP (x, i, j)))
2266*e4b17023SJohn Marino 		return 1;
2267*e4b17023SJohn Marino 	  }
2268*e4b17023SJohn Marino       }
2269*e4b17023SJohn Marino   }
2270*e4b17023SJohn Marino   return 0;
2271*e4b17023SJohn Marino }
2272*e4b17023SJohn Marino 
2273*e4b17023SJohn Marino /* Return nonzero if evaluating rtx X might cause a trap.
2274*e4b17023SJohn Marino    FLAGS controls how to consider MEMs.  A nonzero value means the context
2275*e4b17023SJohn Marino    of the access may have changed from the original, such that the
2276*e4b17023SJohn Marino    address may have become invalid.  */
2277*e4b17023SJohn Marino 
2278*e4b17023SJohn Marino int
2279*e4b17023SJohn Marino may_trap_p_1 (const_rtx x, unsigned flags)
2280*e4b17023SJohn Marino {
2281*e4b17023SJohn Marino   int i;
2282*e4b17023SJohn Marino   enum rtx_code code;
2283*e4b17023SJohn Marino   const char *fmt;
2284*e4b17023SJohn Marino 
2285*e4b17023SJohn Marino   /* We make no distinction currently, but this function is part of
2286*e4b17023SJohn Marino      the internal target-hooks ABI so we keep the parameter as
2287*e4b17023SJohn Marino      "unsigned flags".  */
2288*e4b17023SJohn Marino   bool code_changed = flags != 0;
2289*e4b17023SJohn Marino 
2290*e4b17023SJohn Marino   if (x == 0)
2291*e4b17023SJohn Marino     return 0;
2292*e4b17023SJohn Marino   code = GET_CODE (x);
2293*e4b17023SJohn Marino   switch (code)
2294*e4b17023SJohn Marino     {
2295*e4b17023SJohn Marino       /* Handle these cases quickly.  */
2296*e4b17023SJohn Marino     case CONST_INT:
2297*e4b17023SJohn Marino     case CONST_DOUBLE:
2298*e4b17023SJohn Marino     case CONST_FIXED:
2299*e4b17023SJohn Marino     case CONST_VECTOR:
2300*e4b17023SJohn Marino     case SYMBOL_REF:
2301*e4b17023SJohn Marino     case LABEL_REF:
2302*e4b17023SJohn Marino     case CONST:
2303*e4b17023SJohn Marino     case PC:
2304*e4b17023SJohn Marino     case CC0:
2305*e4b17023SJohn Marino     case REG:
2306*e4b17023SJohn Marino     case SCRATCH:
2307*e4b17023SJohn Marino       return 0;
2308*e4b17023SJohn Marino 
2309*e4b17023SJohn Marino     case UNSPEC:
2310*e4b17023SJohn Marino     case UNSPEC_VOLATILE:
2311*e4b17023SJohn Marino       return targetm.unspec_may_trap_p (x, flags);
2312*e4b17023SJohn Marino 
2313*e4b17023SJohn Marino     case ASM_INPUT:
2314*e4b17023SJohn Marino     case TRAP_IF:
2315*e4b17023SJohn Marino       return 1;
2316*e4b17023SJohn Marino 
2317*e4b17023SJohn Marino     case ASM_OPERANDS:
2318*e4b17023SJohn Marino       return MEM_VOLATILE_P (x);
2319*e4b17023SJohn Marino 
2320*e4b17023SJohn Marino       /* Memory ref can trap unless it's a static var or a stack slot.  */
2321*e4b17023SJohn Marino     case MEM:
2322*e4b17023SJohn Marino       /* Recognize specific pattern of stack checking probes.  */
2323*e4b17023SJohn Marino       if (flag_stack_check
2324*e4b17023SJohn Marino 	  && MEM_VOLATILE_P (x)
2325*e4b17023SJohn Marino 	  && XEXP (x, 0) == stack_pointer_rtx)
2326*e4b17023SJohn Marino 	return 1;
2327*e4b17023SJohn Marino       if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2328*e4b17023SJohn Marino 	     reference; moving it out of context such as when moving code
2329*e4b17023SJohn Marino 	     when optimizing, might cause its address to become invalid.  */
2330*e4b17023SJohn Marino 	  code_changed
2331*e4b17023SJohn Marino 	  || !MEM_NOTRAP_P (x))
2332*e4b17023SJohn Marino 	{
2333*e4b17023SJohn Marino 	  HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
2334*e4b17023SJohn Marino 	  return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2335*e4b17023SJohn Marino 					GET_MODE (x), code_changed);
2336*e4b17023SJohn Marino 	}
2337*e4b17023SJohn Marino 
2338*e4b17023SJohn Marino       return 0;
2339*e4b17023SJohn Marino 
2340*e4b17023SJohn Marino       /* Division by a non-constant might trap.  */
2341*e4b17023SJohn Marino     case DIV:
2342*e4b17023SJohn Marino     case MOD:
2343*e4b17023SJohn Marino     case UDIV:
2344*e4b17023SJohn Marino     case UMOD:
2345*e4b17023SJohn Marino       if (HONOR_SNANS (GET_MODE (x)))
2346*e4b17023SJohn Marino 	return 1;
2347*e4b17023SJohn Marino       if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2348*e4b17023SJohn Marino 	return flag_trapping_math;
2349*e4b17023SJohn Marino       if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2350*e4b17023SJohn Marino 	return 1;
2351*e4b17023SJohn Marino       break;
2352*e4b17023SJohn Marino 
2353*e4b17023SJohn Marino     case EXPR_LIST:
2354*e4b17023SJohn Marino       /* An EXPR_LIST is used to represent a function call.  This
2355*e4b17023SJohn Marino 	 certainly may trap.  */
2356*e4b17023SJohn Marino       return 1;
2357*e4b17023SJohn Marino 
2358*e4b17023SJohn Marino     case GE:
2359*e4b17023SJohn Marino     case GT:
2360*e4b17023SJohn Marino     case LE:
2361*e4b17023SJohn Marino     case LT:
2362*e4b17023SJohn Marino     case LTGT:
2363*e4b17023SJohn Marino     case COMPARE:
2364*e4b17023SJohn Marino       /* Some floating point comparisons may trap.  */
2365*e4b17023SJohn Marino       if (!flag_trapping_math)
2366*e4b17023SJohn Marino 	break;
2367*e4b17023SJohn Marino       /* ??? There is no machine independent way to check for tests that trap
2368*e4b17023SJohn Marino 	 when COMPARE is used, though many targets do make this distinction.
2369*e4b17023SJohn Marino 	 For instance, sparc uses CCFPE for compares which generate exceptions
2370*e4b17023SJohn Marino 	 and CCFP for compares which do not generate exceptions.  */
2371*e4b17023SJohn Marino       if (HONOR_NANS (GET_MODE (x)))
2372*e4b17023SJohn Marino 	return 1;
2373*e4b17023SJohn Marino       /* But often the compare has some CC mode, so check operand
2374*e4b17023SJohn Marino 	 modes as well.  */
2375*e4b17023SJohn Marino       if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2376*e4b17023SJohn Marino 	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2377*e4b17023SJohn Marino 	return 1;
2378*e4b17023SJohn Marino       break;
2379*e4b17023SJohn Marino 
2380*e4b17023SJohn Marino     case EQ:
2381*e4b17023SJohn Marino     case NE:
2382*e4b17023SJohn Marino       if (HONOR_SNANS (GET_MODE (x)))
2383*e4b17023SJohn Marino 	return 1;
2384*e4b17023SJohn Marino       /* Often comparison is CC mode, so check operand modes.  */
2385*e4b17023SJohn Marino       if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2386*e4b17023SJohn Marino 	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2387*e4b17023SJohn Marino 	return 1;
2388*e4b17023SJohn Marino       break;
2389*e4b17023SJohn Marino 
2390*e4b17023SJohn Marino     case FIX:
2391*e4b17023SJohn Marino       /* Conversion of floating point might trap.  */
2392*e4b17023SJohn Marino       if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2393*e4b17023SJohn Marino 	return 1;
2394*e4b17023SJohn Marino       break;
2395*e4b17023SJohn Marino 
2396*e4b17023SJohn Marino     case NEG:
2397*e4b17023SJohn Marino     case ABS:
2398*e4b17023SJohn Marino     case SUBREG:
2399*e4b17023SJohn Marino       /* These operations don't trap even with floating point.  */
2400*e4b17023SJohn Marino       break;
2401*e4b17023SJohn Marino 
2402*e4b17023SJohn Marino     default:
2403*e4b17023SJohn Marino       /* Any floating arithmetic may trap.  */
2404*e4b17023SJohn Marino       if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2405*e4b17023SJohn Marino 	  && flag_trapping_math)
2406*e4b17023SJohn Marino 	return 1;
2407*e4b17023SJohn Marino     }
2408*e4b17023SJohn Marino 
2409*e4b17023SJohn Marino   fmt = GET_RTX_FORMAT (code);
2410*e4b17023SJohn Marino   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2411*e4b17023SJohn Marino     {
2412*e4b17023SJohn Marino       if (fmt[i] == 'e')
2413*e4b17023SJohn Marino 	{
2414*e4b17023SJohn Marino 	  if (may_trap_p_1 (XEXP (x, i), flags))
2415*e4b17023SJohn Marino 	    return 1;
2416*e4b17023SJohn Marino 	}
2417*e4b17023SJohn Marino       else if (fmt[i] == 'E')
2418*e4b17023SJohn Marino 	{
2419*e4b17023SJohn Marino 	  int j;
2420*e4b17023SJohn Marino 	  for (j = 0; j < XVECLEN (x, i); j++)
2421*e4b17023SJohn Marino 	    if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2422*e4b17023SJohn Marino 	      return 1;
2423*e4b17023SJohn Marino 	}
2424*e4b17023SJohn Marino     }
2425*e4b17023SJohn Marino   return 0;
2426*e4b17023SJohn Marino }
2427*e4b17023SJohn Marino 
2428*e4b17023SJohn Marino /* Return nonzero if evaluating rtx X might cause a trap.  */
2429*e4b17023SJohn Marino 
2430*e4b17023SJohn Marino int
2431*e4b17023SJohn Marino may_trap_p (const_rtx x)
2432*e4b17023SJohn Marino {
2433*e4b17023SJohn Marino   return may_trap_p_1 (x, 0);
2434*e4b17023SJohn Marino }
2435*e4b17023SJohn Marino 
2436*e4b17023SJohn Marino /* Same as above, but additionally return nonzero if evaluating rtx X might
2437*e4b17023SJohn Marino    cause a fault.  We define a fault for the purpose of this function as a
2438*e4b17023SJohn Marino    cause a fault.  We define a fault for the purpose of this function as an
2439*e4b17023SJohn Marino    execution of a valid program; the typical example is an unaligned memory
2440*e4b17023SJohn Marino    access on a strict alignment machine.  The compiler guarantees that it
2441*e4b17023SJohn Marino    doesn't generate code that will fault from a valid program, but this
2442*e4b17023SJohn Marino    guarantee doesn't mean anything for individual instructions.  Consider
2443*e4b17023SJohn Marino    the following example:
2444*e4b17023SJohn Marino 
2445*e4b17023SJohn Marino       struct S { int d; union { char *cp; int *ip; }; };
2446*e4b17023SJohn Marino 
2447*e4b17023SJohn Marino       int foo(struct S *s)
2448*e4b17023SJohn Marino       {
2449*e4b17023SJohn Marino 	if (s->d == 1)
2450*e4b17023SJohn Marino 	  return *s->ip;
2451*e4b17023SJohn Marino 	else
2452*e4b17023SJohn Marino 	  return *s->cp;
2453*e4b17023SJohn Marino       }
2454*e4b17023SJohn Marino 
2455*e4b17023SJohn Marino    on a strict alignment machine.  In a valid program, foo will never be
2456*e4b17023SJohn Marino    invoked on a structure for which d is equal to 1 and the underlying
2457*e4b17023SJohn Marino    unique field of the union not aligned on a 4-byte boundary, but the
2458*e4b17023SJohn Marino    expression *s->ip might cause a fault if considered individually.
2459*e4b17023SJohn Marino 
2460*e4b17023SJohn Marino    At the RTL level, potentially problematic expressions will almost always
2461*e4b17023SJohn Marino    satisfy may_trap_p; for example, the above dereference can be emitted as
2462*e4b17023SJohn Marino    (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2463*e4b17023SJohn Marino    However, suppose that foo is inlined in a caller that causes s->cp to
2464*e4b17023SJohn Marino    point to a local character variable and guarantees that s->d is not set
2465*e4b17023SJohn Marino    to 1; foo may have been effectively translated into pseudo-RTL as:
2466*e4b17023SJohn Marino 
2467*e4b17023SJohn Marino       if ((reg:SI) == 1)
2468*e4b17023SJohn Marino 	(set (reg:SI) (mem:SI (%fp - 7)))
2469*e4b17023SJohn Marino       else
2470*e4b17023SJohn Marino 	(set (reg:QI) (mem:QI (%fp - 7)))
2471*e4b17023SJohn Marino 
2472*e4b17023SJohn Marino    Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2473*e4b17023SJohn Marino    memory reference to a stack slot, but it will certainly cause a fault
2474*e4b17023SJohn Marino    on a strict alignment machine.  */
2475*e4b17023SJohn Marino 
2476*e4b17023SJohn Marino int
2477*e4b17023SJohn Marino may_trap_or_fault_p (const_rtx x)
2478*e4b17023SJohn Marino {
2479*e4b17023SJohn Marino   return may_trap_p_1 (x, 1);
2480*e4b17023SJohn Marino }
2481*e4b17023SJohn Marino 
2482*e4b17023SJohn Marino /* Return nonzero if X contains a comparison that is not either EQ or NE,
2483*e4b17023SJohn Marino    i.e., an inequality.  */
2484*e4b17023SJohn Marino 
2485*e4b17023SJohn Marino int
2486*e4b17023SJohn Marino inequality_comparisons_p (const_rtx x)
2487*e4b17023SJohn Marino {
2488*e4b17023SJohn Marino   const char *fmt;
2489*e4b17023SJohn Marino   int len, i;
2490*e4b17023SJohn Marino   const enum rtx_code code = GET_CODE (x);
2491*e4b17023SJohn Marino 
2492*e4b17023SJohn Marino   switch (code)
2493*e4b17023SJohn Marino     {
2494*e4b17023SJohn Marino     case REG:
2495*e4b17023SJohn Marino     case SCRATCH:
2496*e4b17023SJohn Marino     case PC:
2497*e4b17023SJohn Marino     case CC0:
2498*e4b17023SJohn Marino     case CONST_INT:
2499*e4b17023SJohn Marino     case CONST_DOUBLE:
2500*e4b17023SJohn Marino     case CONST_FIXED:
2501*e4b17023SJohn Marino     case CONST_VECTOR:
2502*e4b17023SJohn Marino     case CONST:
2503*e4b17023SJohn Marino     case LABEL_REF:
2504*e4b17023SJohn Marino     case SYMBOL_REF:
2505*e4b17023SJohn Marino       return 0;
2506*e4b17023SJohn Marino 
2507*e4b17023SJohn Marino     case LT:
2508*e4b17023SJohn Marino     case LTU:
2509*e4b17023SJohn Marino     case GT:
2510*e4b17023SJohn Marino     case GTU:
2511*e4b17023SJohn Marino     case LE:
2512*e4b17023SJohn Marino     case LEU:
2513*e4b17023SJohn Marino     case GE:
2514*e4b17023SJohn Marino     case GEU:
2515*e4b17023SJohn Marino       return 1;
2516*e4b17023SJohn Marino 
2517*e4b17023SJohn Marino     default:
2518*e4b17023SJohn Marino       break;
2519*e4b17023SJohn Marino     }
2520*e4b17023SJohn Marino 
2521*e4b17023SJohn Marino   len = GET_RTX_LENGTH (code);
2522*e4b17023SJohn Marino   fmt = GET_RTX_FORMAT (code);
2523*e4b17023SJohn Marino 
2524*e4b17023SJohn Marino   for (i = 0; i < len; i++)
2525*e4b17023SJohn Marino     {
2526*e4b17023SJohn Marino       if (fmt[i] == 'e')
2527*e4b17023SJohn Marino 	{
2528*e4b17023SJohn Marino 	  if (inequality_comparisons_p (XEXP (x, i)))
2529*e4b17023SJohn Marino 	    return 1;
2530*e4b17023SJohn Marino 	}
2531*e4b17023SJohn Marino       else if (fmt[i] == 'E')
2532*e4b17023SJohn Marino 	{
2533*e4b17023SJohn Marino 	  int j;
2534*e4b17023SJohn Marino 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2535*e4b17023SJohn Marino 	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
2536*e4b17023SJohn Marino 	      return 1;
2537*e4b17023SJohn Marino 	}
2538*e4b17023SJohn Marino     }
2539*e4b17023SJohn Marino 
2540*e4b17023SJohn Marino   return 0;
2541*e4b17023SJohn Marino }
2542*e4b17023SJohn Marino 
2543*e4b17023SJohn Marino /* Replace any occurrence of FROM in X with TO.  The function does
2544*e4b17023SJohn Marino    not descend into CONST_DOUBLE expressions when replacing.
2545*e4b17023SJohn Marino 
2546*e4b17023SJohn Marino    Note that copying is not done so X must not be shared unless all copies
2547*e4b17023SJohn Marino    are to be modified.  */
2548*e4b17023SJohn Marino 
2549*e4b17023SJohn Marino rtx
2550*e4b17023SJohn Marino replace_rtx (rtx x, rtx from, rtx to)
2551*e4b17023SJohn Marino {
2552*e4b17023SJohn Marino   int i, j;
2553*e4b17023SJohn Marino   const char *fmt;
2554*e4b17023SJohn Marino 
2555*e4b17023SJohn Marino   /* The following prevents loops from occurring when we change a MEM
2556*e4b17023SJohn Marino      inside a CONST_DOUBLE into the same CONST_DOUBLE.  */
2557*e4b17023SJohn Marino   if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2558*e4b17023SJohn Marino     return x;
2559*e4b17023SJohn Marino 
2560*e4b17023SJohn Marino   if (x == from)
2561*e4b17023SJohn Marino     return to;
2562*e4b17023SJohn Marino 
2563*e4b17023SJohn Marino   /* Allow this function to make replacements in EXPR_LISTs.  */
2564*e4b17023SJohn Marino   if (x == 0)
2565*e4b17023SJohn Marino     return 0;
2566*e4b17023SJohn Marino 
2567*e4b17023SJohn Marino   if (GET_CODE (x) == SUBREG)
2568*e4b17023SJohn Marino     {
2569*e4b17023SJohn Marino       rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2570*e4b17023SJohn Marino 
2571*e4b17023SJohn Marino       if (CONST_INT_P (new_rtx))
2572*e4b17023SJohn Marino 	{
2573*e4b17023SJohn Marino 	  x = simplify_subreg (GET_MODE (x), new_rtx,
2574*e4b17023SJohn Marino 			       GET_MODE (SUBREG_REG (x)),
2575*e4b17023SJohn Marino 			       SUBREG_BYTE (x));
2576*e4b17023SJohn Marino 	  gcc_assert (x);
2577*e4b17023SJohn Marino 	}
2578*e4b17023SJohn Marino       else
2579*e4b17023SJohn Marino 	SUBREG_REG (x) = new_rtx;
2580*e4b17023SJohn Marino 
2581*e4b17023SJohn Marino       return x;
2582*e4b17023SJohn Marino     }
2583*e4b17023SJohn Marino   else if (GET_CODE (x) == ZERO_EXTEND)
2584*e4b17023SJohn Marino     {
2585*e4b17023SJohn Marino       rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2586*e4b17023SJohn Marino 
2587*e4b17023SJohn Marino       if (CONST_INT_P (new_rtx))
2588*e4b17023SJohn Marino 	{
2589*e4b17023SJohn Marino 	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2590*e4b17023SJohn Marino 					new_rtx, GET_MODE (XEXP (x, 0)));
2591*e4b17023SJohn Marino 	  gcc_assert (x);
2592*e4b17023SJohn Marino 	}
2593*e4b17023SJohn Marino       else
2594*e4b17023SJohn Marino 	XEXP (x, 0) = new_rtx;
2595*e4b17023SJohn Marino 
2596*e4b17023SJohn Marino       return x;
2597*e4b17023SJohn Marino     }
2598*e4b17023SJohn Marino 
2599*e4b17023SJohn Marino   fmt = GET_RTX_FORMAT (GET_CODE (x));
2600*e4b17023SJohn Marino   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2601*e4b17023SJohn Marino     {
2602*e4b17023SJohn Marino       if (fmt[i] == 'e')
2603*e4b17023SJohn Marino 	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2604*e4b17023SJohn Marino       else if (fmt[i] == 'E')
2605*e4b17023SJohn Marino 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2606*e4b17023SJohn Marino 	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2607*e4b17023SJohn Marino     }
2608*e4b17023SJohn Marino 
2609*e4b17023SJohn Marino   return x;
2610*e4b17023SJohn Marino }
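
/* A minimal usage sketch for replace_rtx (illustrative only; INSN, OLD_REG
   and NEW_REG are hypothetical, not part of this file).  Because no copying
   is done, the pattern is copied first so that shared RTL is not modified
   in place:

     rtx pat = copy_rtx (PATTERN (insn));
     pat = replace_rtx (pat, old_reg, new_reg);
*/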
2611*e4b17023SJohn Marino 
2612*e4b17023SJohn Marino /* Replace occurrences of the old label in *X with the new one.
2613*e4b17023SJohn Marino    DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */
2614*e4b17023SJohn Marino 
2615*e4b17023SJohn Marino int
2616*e4b17023SJohn Marino replace_label (rtx *x, void *data)
2617*e4b17023SJohn Marino {
2618*e4b17023SJohn Marino   rtx l = *x;
2619*e4b17023SJohn Marino   rtx old_label = ((replace_label_data *) data)->r1;
2620*e4b17023SJohn Marino   rtx new_label = ((replace_label_data *) data)->r2;
2621*e4b17023SJohn Marino   bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2622*e4b17023SJohn Marino 
2623*e4b17023SJohn Marino   if (l == NULL_RTX)
2624*e4b17023SJohn Marino     return 0;
2625*e4b17023SJohn Marino 
2626*e4b17023SJohn Marino   if (GET_CODE (l) == SYMBOL_REF
2627*e4b17023SJohn Marino       && CONSTANT_POOL_ADDRESS_P (l))
2628*e4b17023SJohn Marino     {
2629*e4b17023SJohn Marino       rtx c = get_pool_constant (l);
2630*e4b17023SJohn Marino       if (rtx_referenced_p (old_label, c))
2631*e4b17023SJohn Marino 	{
2632*e4b17023SJohn Marino 	  rtx new_c, new_l;
2633*e4b17023SJohn Marino 	  replace_label_data *d = (replace_label_data *) data;
2634*e4b17023SJohn Marino 
2635*e4b17023SJohn Marino 	  /* Create a copy of constant C; replace the label inside
2636*e4b17023SJohn Marino 	     but do not update LABEL_NUSES because uses in constant pool
2637*e4b17023SJohn Marino 	     are not counted.  */
2638*e4b17023SJohn Marino 	  new_c = copy_rtx (c);
2639*e4b17023SJohn Marino 	  d->update_label_nuses = false;
2640*e4b17023SJohn Marino 	  for_each_rtx (&new_c, replace_label, data);
2641*e4b17023SJohn Marino 	  d->update_label_nuses = update_label_nuses;
2642*e4b17023SJohn Marino 
2643*e4b17023SJohn Marino 	  /* Add the new constant NEW_C to constant pool and replace
2644*e4b17023SJohn Marino 	     the old reference to constant by new reference.  */
2645*e4b17023SJohn Marino 	  new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2646*e4b17023SJohn Marino 	  *x = replace_rtx (l, l, new_l);
2647*e4b17023SJohn Marino 	}
2648*e4b17023SJohn Marino       return 0;
2649*e4b17023SJohn Marino     }
2650*e4b17023SJohn Marino 
2651*e4b17023SJohn Marino   /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2652*e4b17023SJohn Marino      field.  This is not handled by for_each_rtx because it doesn't
2653*e4b17023SJohn Marino      handle unprinted ('0') fields.  */
2654*e4b17023SJohn Marino   if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2655*e4b17023SJohn Marino     JUMP_LABEL (l) = new_label;
2656*e4b17023SJohn Marino 
2657*e4b17023SJohn Marino   if ((GET_CODE (l) == LABEL_REF
2658*e4b17023SJohn Marino        || GET_CODE (l) == INSN_LIST)
2659*e4b17023SJohn Marino       && XEXP (l, 0) == old_label)
2660*e4b17023SJohn Marino     {
2661*e4b17023SJohn Marino       XEXP (l, 0) = new_label;
2662*e4b17023SJohn Marino       if (update_label_nuses)
2663*e4b17023SJohn Marino 	{
2664*e4b17023SJohn Marino 	  ++LABEL_NUSES (new_label);
2665*e4b17023SJohn Marino 	  --LABEL_NUSES (old_label);
2666*e4b17023SJohn Marino 	}
2667*e4b17023SJohn Marino       return 0;
2668*e4b17023SJohn Marino     }
2669*e4b17023SJohn Marino 
2670*e4b17023SJohn Marino   return 0;
2671*e4b17023SJohn Marino }
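
/* A minimal usage sketch for replace_label (illustrative only; INSN,
   OLD_LABEL and NEW_LABEL are hypothetical).  The callback is normally
   driven by for_each_rtx over an insn pattern:

     replace_label_data data;
     data.r1 = old_label;
     data.r2 = new_label;
     data.update_label_nuses = true;
     for_each_rtx (&PATTERN (insn), replace_label, &data);
*/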
2672*e4b17023SJohn Marino 
2673*e4b17023SJohn Marino /* When *BODY is equal to X or X is directly referenced by *BODY,
2674*e4b17023SJohn Marino    return nonzero, so that FOR_EACH_RTX stops traversing and returns
2675*e4b17023SJohn Marino    nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY.  */
2676*e4b17023SJohn Marino 
2677*e4b17023SJohn Marino static int
2678*e4b17023SJohn Marino rtx_referenced_p_1 (rtx *body, void *x)
2679*e4b17023SJohn Marino {
2680*e4b17023SJohn Marino   rtx y = (rtx) x;
2681*e4b17023SJohn Marino 
2682*e4b17023SJohn Marino   if (*body == NULL_RTX)
2683*e4b17023SJohn Marino     return y == NULL_RTX;
2684*e4b17023SJohn Marino 
2685*e4b17023SJohn Marino   /* Return true if a label_ref *BODY refers to label Y.  */
2686*e4b17023SJohn Marino   if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2687*e4b17023SJohn Marino     return XEXP (*body, 0) == y;
2688*e4b17023SJohn Marino 
2689*e4b17023SJohn Marino   /* If *BODY is a reference to pool constant traverse the constant.  */
2690*e4b17023SJohn Marino   if (GET_CODE (*body) == SYMBOL_REF
2691*e4b17023SJohn Marino       && CONSTANT_POOL_ADDRESS_P (*body))
2692*e4b17023SJohn Marino     return rtx_referenced_p (y, get_pool_constant (*body));
2693*e4b17023SJohn Marino 
2694*e4b17023SJohn Marino   /* By default, compare the RTL expressions.  */
2695*e4b17023SJohn Marino   return rtx_equal_p (*body, y);
2696*e4b17023SJohn Marino }
2697*e4b17023SJohn Marino 
2698*e4b17023SJohn Marino /* Return true if X is referenced in BODY.  */
2699*e4b17023SJohn Marino 
2700*e4b17023SJohn Marino int
2701*e4b17023SJohn Marino rtx_referenced_p (rtx x, rtx body)
2702*e4b17023SJohn Marino {
2703*e4b17023SJohn Marino   return for_each_rtx (&body, rtx_referenced_p_1, x);
2704*e4b17023SJohn Marino }
2705*e4b17023SJohn Marino 
2706*e4b17023SJohn Marino /* If INSN is a tablejump, return true and store the label (which precedes the
2707*e4b17023SJohn Marino    jump table) in *LABELP and the jump table in *TABLEP.  LABELP and TABLEP may be NULL.  */
2708*e4b17023SJohn Marino 
2709*e4b17023SJohn Marino bool
2710*e4b17023SJohn Marino tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2711*e4b17023SJohn Marino {
2712*e4b17023SJohn Marino   rtx label, table;
2713*e4b17023SJohn Marino 
2714*e4b17023SJohn Marino   if (!JUMP_P (insn))
2715*e4b17023SJohn Marino     return false;
2716*e4b17023SJohn Marino 
2717*e4b17023SJohn Marino   label = JUMP_LABEL (insn);
2718*e4b17023SJohn Marino   if (label != NULL_RTX && !ANY_RETURN_P (label)
2719*e4b17023SJohn Marino       && (table = next_active_insn (label)) != NULL_RTX
2720*e4b17023SJohn Marino       && JUMP_TABLE_DATA_P (table))
2721*e4b17023SJohn Marino     {
2722*e4b17023SJohn Marino       if (labelp)
2723*e4b17023SJohn Marino 	*labelp = label;
2724*e4b17023SJohn Marino       if (tablep)
2725*e4b17023SJohn Marino 	*tablep = table;
2726*e4b17023SJohn Marino       return true;
2727*e4b17023SJohn Marino     }
2728*e4b17023SJohn Marino   return false;
2729*e4b17023SJohn Marino }
2730*e4b17023SJohn Marino 
2731*e4b17023SJohn Marino /* A subroutine of computed_jump_p.  Return 1 if X contains a REG, a MEM, or
2732*e4b17023SJohn Marino    a constant that is not in the constant pool and is not in the condition
2733*e4b17023SJohn Marino    of an IF_THEN_ELSE.  */
2734*e4b17023SJohn Marino 
2735*e4b17023SJohn Marino static int
2736*e4b17023SJohn Marino computed_jump_p_1 (const_rtx x)
2737*e4b17023SJohn Marino {
2738*e4b17023SJohn Marino   const enum rtx_code code = GET_CODE (x);
2739*e4b17023SJohn Marino   int i, j;
2740*e4b17023SJohn Marino   const char *fmt;
2741*e4b17023SJohn Marino 
2742*e4b17023SJohn Marino   switch (code)
2743*e4b17023SJohn Marino     {
2744*e4b17023SJohn Marino     case LABEL_REF:
2745*e4b17023SJohn Marino     case PC:
2746*e4b17023SJohn Marino       return 0;
2747*e4b17023SJohn Marino 
2748*e4b17023SJohn Marino     case CONST:
2749*e4b17023SJohn Marino     case CONST_INT:
2750*e4b17023SJohn Marino     case CONST_DOUBLE:
2751*e4b17023SJohn Marino     case CONST_FIXED:
2752*e4b17023SJohn Marino     case CONST_VECTOR:
2753*e4b17023SJohn Marino     case SYMBOL_REF:
2754*e4b17023SJohn Marino     case REG:
2755*e4b17023SJohn Marino       return 1;
2756*e4b17023SJohn Marino 
2757*e4b17023SJohn Marino     case MEM:
2758*e4b17023SJohn Marino       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2759*e4b17023SJohn Marino 		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2760*e4b17023SJohn Marino 
2761*e4b17023SJohn Marino     case IF_THEN_ELSE:
2762*e4b17023SJohn Marino       return (computed_jump_p_1 (XEXP (x, 1))
2763*e4b17023SJohn Marino 	      || computed_jump_p_1 (XEXP (x, 2)));
2764*e4b17023SJohn Marino 
2765*e4b17023SJohn Marino     default:
2766*e4b17023SJohn Marino       break;
2767*e4b17023SJohn Marino     }
2768*e4b17023SJohn Marino 
2769*e4b17023SJohn Marino   fmt = GET_RTX_FORMAT (code);
2770*e4b17023SJohn Marino   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2771*e4b17023SJohn Marino     {
2772*e4b17023SJohn Marino       if (fmt[i] == 'e'
2773*e4b17023SJohn Marino 	  && computed_jump_p_1 (XEXP (x, i)))
2774*e4b17023SJohn Marino 	return 1;
2775*e4b17023SJohn Marino 
2776*e4b17023SJohn Marino       else if (fmt[i] == 'E')
2777*e4b17023SJohn Marino 	for (j = 0; j < XVECLEN (x, i); j++)
2778*e4b17023SJohn Marino 	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
2779*e4b17023SJohn Marino 	    return 1;
2780*e4b17023SJohn Marino     }
2781*e4b17023SJohn Marino 
2782*e4b17023SJohn Marino   return 0;
2783*e4b17023SJohn Marino }
2784*e4b17023SJohn Marino 
2785*e4b17023SJohn Marino /* Return nonzero if INSN is an indirect jump (aka computed jump).
2786*e4b17023SJohn Marino 
2787*e4b17023SJohn Marino    Tablejumps and casesi insns are not considered indirect jumps;
2788*e4b17023SJohn Marino    we can recognize them by a (use (label_ref)).  */
2789*e4b17023SJohn Marino 
2790*e4b17023SJohn Marino int
2791*e4b17023SJohn Marino computed_jump_p (const_rtx insn)
2792*e4b17023SJohn Marino {
2793*e4b17023SJohn Marino   int i;
2794*e4b17023SJohn Marino   if (JUMP_P (insn))
2795*e4b17023SJohn Marino     {
2796*e4b17023SJohn Marino       rtx pat = PATTERN (insn);
2797*e4b17023SJohn Marino 
2798*e4b17023SJohn Marino       /* If we have a JUMP_LABEL set, we're not a computed jump.  */
2799*e4b17023SJohn Marino       if (JUMP_LABEL (insn) != NULL)
2800*e4b17023SJohn Marino 	return 0;
2801*e4b17023SJohn Marino 
2802*e4b17023SJohn Marino       if (GET_CODE (pat) == PARALLEL)
2803*e4b17023SJohn Marino 	{
2804*e4b17023SJohn Marino 	  int len = XVECLEN (pat, 0);
2805*e4b17023SJohn Marino 	  int has_use_labelref = 0;
2806*e4b17023SJohn Marino 
2807*e4b17023SJohn Marino 	  for (i = len - 1; i >= 0; i--)
2808*e4b17023SJohn Marino 	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2809*e4b17023SJohn Marino 		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2810*e4b17023SJohn Marino 		    == LABEL_REF))
2811*e4b17023SJohn Marino 	      has_use_labelref = 1;
2812*e4b17023SJohn Marino 
2813*e4b17023SJohn Marino 	  if (! has_use_labelref)
2814*e4b17023SJohn Marino 	    for (i = len - 1; i >= 0; i--)
2815*e4b17023SJohn Marino 	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2816*e4b17023SJohn Marino 		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2817*e4b17023SJohn Marino 		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2818*e4b17023SJohn Marino 		return 1;
2819*e4b17023SJohn Marino 	}
2820*e4b17023SJohn Marino       else if (GET_CODE (pat) == SET
2821*e4b17023SJohn Marino 	       && SET_DEST (pat) == pc_rtx
2822*e4b17023SJohn Marino 	       && computed_jump_p_1 (SET_SRC (pat)))
2823*e4b17023SJohn Marino 	return 1;
2824*e4b17023SJohn Marino     }
2825*e4b17023SJohn Marino   return 0;
2826*e4b17023SJohn Marino }
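
/* Illustrative examples (hypothetical patterns): a JUMP_INSN with a null
   JUMP_LABEL whose pattern is (set (pc) (reg:SI r)) or
   (set (pc) (mem:SI (reg:SI r))) is a computed jump, whereas
   (set (pc) (label_ref L)), or a tablejump carrying a
   (use (label_ref ...)) inside its PARALLEL, is not.  */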
2827*e4b17023SJohn Marino 
2828*e4b17023SJohn Marino /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2829*e4b17023SJohn Marino    calls.  Processes the subexpressions of EXP and passes them to F.  */
2830*e4b17023SJohn Marino static int
2831*e4b17023SJohn Marino for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2832*e4b17023SJohn Marino {
2833*e4b17023SJohn Marino   int result, i, j;
2834*e4b17023SJohn Marino   const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2835*e4b17023SJohn Marino   rtx *x;
2836*e4b17023SJohn Marino 
2837*e4b17023SJohn Marino   for (; format[n] != '\0'; n++)
2838*e4b17023SJohn Marino     {
2839*e4b17023SJohn Marino       switch (format[n])
2840*e4b17023SJohn Marino 	{
2841*e4b17023SJohn Marino 	case 'e':
2842*e4b17023SJohn Marino 	  /* Call F on X.  */
2843*e4b17023SJohn Marino 	  x = &XEXP (exp, n);
2844*e4b17023SJohn Marino 	  result = (*f) (x, data);
2845*e4b17023SJohn Marino 	  if (result == -1)
2846*e4b17023SJohn Marino 	    /* Do not traverse sub-expressions.  */
2847*e4b17023SJohn Marino 	    continue;
2848*e4b17023SJohn Marino 	  else if (result != 0)
2849*e4b17023SJohn Marino 	    /* Stop the traversal.  */
2850*e4b17023SJohn Marino 	    return result;
2851*e4b17023SJohn Marino 
2852*e4b17023SJohn Marino 	  if (*x == NULL_RTX)
2853*e4b17023SJohn Marino 	    /* There are no sub-expressions.  */
2854*e4b17023SJohn Marino 	    continue;
2855*e4b17023SJohn Marino 
2856*e4b17023SJohn Marino 	  i = non_rtx_starting_operands[GET_CODE (*x)];
2857*e4b17023SJohn Marino 	  if (i >= 0)
2858*e4b17023SJohn Marino 	    {
2859*e4b17023SJohn Marino 	      result = for_each_rtx_1 (*x, i, f, data);
2860*e4b17023SJohn Marino 	      if (result != 0)
2861*e4b17023SJohn Marino 		return result;
2862*e4b17023SJohn Marino 	    }
2863*e4b17023SJohn Marino 	  break;
2864*e4b17023SJohn Marino 
2865*e4b17023SJohn Marino 	case 'V':
2866*e4b17023SJohn Marino 	case 'E':
2867*e4b17023SJohn Marino 	  if (XVEC (exp, n) == 0)
2868*e4b17023SJohn Marino 	    continue;
2869*e4b17023SJohn Marino 	  for (j = 0; j < XVECLEN (exp, n); ++j)
2870*e4b17023SJohn Marino 	    {
2871*e4b17023SJohn Marino 	      /* Call F on X.  */
2872*e4b17023SJohn Marino 	      x = &XVECEXP (exp, n, j);
2873*e4b17023SJohn Marino 	      result = (*f) (x, data);
2874*e4b17023SJohn Marino 	      if (result == -1)
2875*e4b17023SJohn Marino 		/* Do not traverse sub-expressions.  */
2876*e4b17023SJohn Marino 		continue;
2877*e4b17023SJohn Marino 	      else if (result != 0)
2878*e4b17023SJohn Marino 		/* Stop the traversal.  */
2879*e4b17023SJohn Marino 		return result;
2880*e4b17023SJohn Marino 
2881*e4b17023SJohn Marino 	      if (*x == NULL_RTX)
2882*e4b17023SJohn Marino 		/* There are no sub-expressions.  */
2883*e4b17023SJohn Marino 		continue;
2884*e4b17023SJohn Marino 
2885*e4b17023SJohn Marino 	      i = non_rtx_starting_operands[GET_CODE (*x)];
2886*e4b17023SJohn Marino 	      if (i >= 0)
2887*e4b17023SJohn Marino 		{
2888*e4b17023SJohn Marino 		  result = for_each_rtx_1 (*x, i, f, data);
2889*e4b17023SJohn Marino 		  if (result != 0)
2890*e4b17023SJohn Marino 		    return result;
2891*e4b17023SJohn Marino 	        }
2892*e4b17023SJohn Marino 	    }
2893*e4b17023SJohn Marino 	  break;
2894*e4b17023SJohn Marino 
2895*e4b17023SJohn Marino 	default:
2896*e4b17023SJohn Marino 	  /* Nothing to do.  */
2897*e4b17023SJohn Marino 	  break;
2898*e4b17023SJohn Marino 	}
2899*e4b17023SJohn Marino     }
2900*e4b17023SJohn Marino 
2901*e4b17023SJohn Marino   return 0;
2902*e4b17023SJohn Marino }
2903*e4b17023SJohn Marino 
2904*e4b17023SJohn Marino /* Traverse X via depth-first search, calling F for each
2905*e4b17023SJohn Marino    sub-expression (including X itself).  F is also passed the DATA.
2906*e4b17023SJohn Marino    If F returns -1, do not traverse sub-expressions, but continue
2907*e4b17023SJohn Marino    traversing the rest of the tree.  If F ever returns any other
2908*e4b17023SJohn Marino    nonzero value, stop the traversal, and return the value returned
2909*e4b17023SJohn Marino    by F.  Otherwise, return 0.  This function does not traverse inside
2910*e4b17023SJohn Marino    tree structure that contains RTX_EXPRs, or into sub-expressions
2911*e4b17023SJohn Marino    whose format code is `0' since it is not known whether or not those
2912*e4b17023SJohn Marino    codes are actually RTL.
2913*e4b17023SJohn Marino 
2914*e4b17023SJohn Marino    This routine is very general, and could (should?) be used to
2915*e4b17023SJohn Marino    implement many of the other routines in this file.  */
2916*e4b17023SJohn Marino 
2917*e4b17023SJohn Marino int
2918*e4b17023SJohn Marino for_each_rtx (rtx *x, rtx_function f, void *data)
2919*e4b17023SJohn Marino {
2920*e4b17023SJohn Marino   int result;
2921*e4b17023SJohn Marino   int i;
2922*e4b17023SJohn Marino 
2923*e4b17023SJohn Marino   /* Call F on X.  */
2924*e4b17023SJohn Marino   result = (*f) (x, data);
2925*e4b17023SJohn Marino   if (result == -1)
2926*e4b17023SJohn Marino     /* Do not traverse sub-expressions.  */
2927*e4b17023SJohn Marino     return 0;
2928*e4b17023SJohn Marino   else if (result != 0)
2929*e4b17023SJohn Marino     /* Stop the traversal.  */
2930*e4b17023SJohn Marino     return result;
2931*e4b17023SJohn Marino 
2932*e4b17023SJohn Marino   if (*x == NULL_RTX)
2933*e4b17023SJohn Marino     /* There are no sub-expressions.  */
2934*e4b17023SJohn Marino     return 0;
2935*e4b17023SJohn Marino 
2936*e4b17023SJohn Marino   i = non_rtx_starting_operands[GET_CODE (*x)];
2937*e4b17023SJohn Marino   if (i < 0)
2938*e4b17023SJohn Marino     return 0;
2939*e4b17023SJohn Marino 
2940*e4b17023SJohn Marino   return for_each_rtx_1 (*x, i, f, data);
2941*e4b17023SJohn Marino }
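
/* A minimal usage sketch for for_each_rtx (illustrative only; the callback
   count_mems_1, the variable count and the surrounding INSN are
   hypothetical).  The callback returns 0 to keep walking into sub-rtxes;
   returning -1 would skip them and any other nonzero value would stop the
   traversal:

     static int
     count_mems_1 (rtx *x, void *data)
     {
       int *count = (int *) data;
       if (*x != NULL_RTX && MEM_P (*x))
	 ++*count;
       return 0;
     }

     int count = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &count);
*/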
2942*e4b17023SJohn Marino 
2943*e4b17023SJohn Marino 
2944*e4b17023SJohn Marino 
2945*e4b17023SJohn Marino /* Data structure that holds the internal state communicated between
2946*e4b17023SJohn Marino    for_each_inc_dec, for_each_inc_dec_find_mem and
2947*e4b17023SJohn Marino    for_each_inc_dec_find_inc_dec.  */
2948*e4b17023SJohn Marino 
2949*e4b17023SJohn Marino struct for_each_inc_dec_ops {
2950*e4b17023SJohn Marino   /* The function to be called for each autoinc operation found.  */
2951*e4b17023SJohn Marino   for_each_inc_dec_fn fn;
2952*e4b17023SJohn Marino   /* The opaque argument to be passed to it.  */
2953*e4b17023SJohn Marino   void *arg;
2954*e4b17023SJohn Marino   /* The MEM we're visiting, if any.  */
2955*e4b17023SJohn Marino   rtx mem;
2956*e4b17023SJohn Marino };
2957*e4b17023SJohn Marino 
2958*e4b17023SJohn Marino static int for_each_inc_dec_find_mem (rtx *r, void *d);
2959*e4b17023SJohn Marino 
2960*e4b17023SJohn Marino /* Find PRE/POST-INC/DEC/MODIFY operations within *R, extract the
2961*e4b17023SJohn Marino    operands of the equivalent add insn and pass the result to the
2962*e4b17023SJohn Marino    callback specified in *D.  */
2963*e4b17023SJohn Marino 
2964*e4b17023SJohn Marino static int
2965*e4b17023SJohn Marino for_each_inc_dec_find_inc_dec (rtx *r, void *d)
2966*e4b17023SJohn Marino {
2967*e4b17023SJohn Marino   rtx x = *r;
2968*e4b17023SJohn Marino   struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *)d;
2969*e4b17023SJohn Marino 
2970*e4b17023SJohn Marino   switch (GET_CODE (x))
2971*e4b17023SJohn Marino     {
2972*e4b17023SJohn Marino     case PRE_INC:
2973*e4b17023SJohn Marino     case POST_INC:
2974*e4b17023SJohn Marino       {
2975*e4b17023SJohn Marino 	int size = GET_MODE_SIZE (GET_MODE (data->mem));
2976*e4b17023SJohn Marino 	rtx r1 = XEXP (x, 0);
2977*e4b17023SJohn Marino 	rtx c = gen_int_mode (size, GET_MODE (r1));
2978*e4b17023SJohn Marino 	return data->fn (data->mem, x, r1, r1, c, data->arg);
2979*e4b17023SJohn Marino       }
2980*e4b17023SJohn Marino 
2981*e4b17023SJohn Marino     case PRE_DEC:
2982*e4b17023SJohn Marino     case POST_DEC:
2983*e4b17023SJohn Marino       {
2984*e4b17023SJohn Marino 	int size = GET_MODE_SIZE (GET_MODE (data->mem));
2985*e4b17023SJohn Marino 	rtx r1 = XEXP (x, 0);
2986*e4b17023SJohn Marino 	rtx c = gen_int_mode (-size, GET_MODE (r1));
2987*e4b17023SJohn Marino 	return data->fn (data->mem, x, r1, r1, c, data->arg);
2988*e4b17023SJohn Marino       }
2989*e4b17023SJohn Marino 
2990*e4b17023SJohn Marino     case PRE_MODIFY:
2991*e4b17023SJohn Marino     case POST_MODIFY:
2992*e4b17023SJohn Marino       {
2993*e4b17023SJohn Marino 	rtx r1 = XEXP (x, 0);
2994*e4b17023SJohn Marino 	rtx add = XEXP (x, 1);
2995*e4b17023SJohn Marino 	return data->fn (data->mem, x, r1, add, NULL, data->arg);
2996*e4b17023SJohn Marino       }
2997*e4b17023SJohn Marino 
2998*e4b17023SJohn Marino     case MEM:
2999*e4b17023SJohn Marino       {
3000*e4b17023SJohn Marino 	rtx save = data->mem;
3001*e4b17023SJohn Marino 	int ret = for_each_inc_dec_find_mem (r, d);
3002*e4b17023SJohn Marino 	data->mem = save;
3003*e4b17023SJohn Marino 	return ret;
3004*e4b17023SJohn Marino       }
3005*e4b17023SJohn Marino 
3006*e4b17023SJohn Marino     default:
3007*e4b17023SJohn Marino       return 0;
3008*e4b17023SJohn Marino     }
3009*e4b17023SJohn Marino }
3010*e4b17023SJohn Marino 
3011*e4b17023SJohn Marino /* If *R is a MEM, find PRE/POST-INC/DEC/MODIFY operations within its
3012*e4b17023SJohn Marino    address, extract the operands of the equivalent add insn and pass
3013*e4b17023SJohn Marino    the result to the callback specified in *D.  */
3014*e4b17023SJohn Marino 
3015*e4b17023SJohn Marino static int
3016*e4b17023SJohn Marino for_each_inc_dec_find_mem (rtx *r, void *d)
3017*e4b17023SJohn Marino {
3018*e4b17023SJohn Marino   rtx x = *r;
3019*e4b17023SJohn Marino   if (x != NULL_RTX && MEM_P (x))
3020*e4b17023SJohn Marino     {
3021*e4b17023SJohn Marino       struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *) d;
3022*e4b17023SJohn Marino       int result;
3023*e4b17023SJohn Marino 
3024*e4b17023SJohn Marino       data->mem = x;
3025*e4b17023SJohn Marino 
3026*e4b17023SJohn Marino       result = for_each_rtx (&XEXP (x, 0), for_each_inc_dec_find_inc_dec,
3027*e4b17023SJohn Marino 			     data);
3028*e4b17023SJohn Marino       if (result)
3029*e4b17023SJohn Marino 	return result;
3030*e4b17023SJohn Marino 
3031*e4b17023SJohn Marino       return -1;
3032*e4b17023SJohn Marino     }
3033*e4b17023SJohn Marino   return 0;
3034*e4b17023SJohn Marino }
3035*e4b17023SJohn Marino 
3036*e4b17023SJohn Marino /* Traverse *X looking for MEMs, and for autoinc operations within
3037*e4b17023SJohn Marino    them.  For each such autoinc operation found, call FN, passing it
3038*e4b17023SJohn Marino    the innermost enclosing MEM, the operation itself, the RTX modified
3039*e4b17023SJohn Marino    by the operation, two RTXs (the second may be NULL) that, once
3040*e4b17023SJohn Marino    added, represent the value to be held by the modified RTX
3041*e4b17023SJohn Marino    afterwards, and ARG.  FN is to return -1 to skip looking for other
3042*e4b17023SJohn Marino    autoinc operations within the visited operation, 0 to continue the
3043*e4b17023SJohn Marino    traversal, or any other value to have it returned to the caller of
3044*e4b17023SJohn Marino    for_each_inc_dec.  */
3045*e4b17023SJohn Marino 
3046*e4b17023SJohn Marino int
3047*e4b17023SJohn Marino for_each_inc_dec (rtx *x,
3048*e4b17023SJohn Marino 		  for_each_inc_dec_fn fn,
3049*e4b17023SJohn Marino 		  void *arg)
3050*e4b17023SJohn Marino {
3051*e4b17023SJohn Marino   struct for_each_inc_dec_ops data;
3052*e4b17023SJohn Marino 
3053*e4b17023SJohn Marino   data.fn = fn;
3054*e4b17023SJohn Marino   data.arg = arg;
3055*e4b17023SJohn Marino   data.mem = NULL;
3056*e4b17023SJohn Marino 
3057*e4b17023SJohn Marino   return for_each_rtx (x, for_each_inc_dec_find_mem, &data);
3058*e4b17023SJohn Marino }
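
/* A minimal usage sketch for for_each_inc_dec (illustrative only; the
   callback count_autoinc_1 and the surrounding INSN are hypothetical).
   DEST is the modified rtx, and SRC plus SRCOFF (when SRCOFF is non-NULL)
   give the value it holds afterwards; here the callback merely counts the
   autoinc operations found:

     static int
     count_autoinc_1 (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
		      rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
		      rtx srcoff ATTRIBUTE_UNUSED, void *arg)
     {
       ++*(int *) arg;
       return 0;
     }

     int count = 0;
     for_each_inc_dec (&PATTERN (insn), count_autoinc_1, &count);
*/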
3059*e4b17023SJohn Marino 
3060*e4b17023SJohn Marino 
3061*e4b17023SJohn Marino /* Searches X for any reference to REGNO, returning the rtx of the
3062*e4b17023SJohn Marino    reference found if any.  Otherwise, returns NULL_RTX.  */
3063*e4b17023SJohn Marino 
3064*e4b17023SJohn Marino rtx
3065*e4b17023SJohn Marino regno_use_in (unsigned int regno, rtx x)
3066*e4b17023SJohn Marino {
3067*e4b17023SJohn Marino   const char *fmt;
3068*e4b17023SJohn Marino   int i, j;
3069*e4b17023SJohn Marino   rtx tem;
3070*e4b17023SJohn Marino 
3071*e4b17023SJohn Marino   if (REG_P (x) && REGNO (x) == regno)
3072*e4b17023SJohn Marino     return x;
3073*e4b17023SJohn Marino 
3074*e4b17023SJohn Marino   fmt = GET_RTX_FORMAT (GET_CODE (x));
3075*e4b17023SJohn Marino   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3076*e4b17023SJohn Marino     {
3077*e4b17023SJohn Marino       if (fmt[i] == 'e')
3078*e4b17023SJohn Marino 	{
3079*e4b17023SJohn Marino 	  if ((tem = regno_use_in (regno, XEXP (x, i))))
3080*e4b17023SJohn Marino 	    return tem;
3081*e4b17023SJohn Marino 	}
3082*e4b17023SJohn Marino       else if (fmt[i] == 'E')
3083*e4b17023SJohn Marino 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3084*e4b17023SJohn Marino 	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3085*e4b17023SJohn Marino 	    return tem;
3086*e4b17023SJohn Marino     }
3087*e4b17023SJohn Marino 
3088*e4b17023SJohn Marino   return NULL_RTX;
3089*e4b17023SJohn Marino }
3090*e4b17023SJohn Marino 
3091*e4b17023SJohn Marino /* Return a value indicating whether OP, an operand of a commutative
3092*e4b17023SJohn Marino    operation, is preferred as the first or second operand.  The higher
3093*e4b17023SJohn Marino    the value, the stronger the preference for being the first operand.
3094*e4b17023SJohn Marino    We use negative values to indicate a preference for being the second
3095*e4b17023SJohn Marino    operand and positive values for being the first operand.  */
3096*e4b17023SJohn Marino 
3097*e4b17023SJohn Marino int
3098*e4b17023SJohn Marino commutative_operand_precedence (rtx op)
3099*e4b17023SJohn Marino {
3100*e4b17023SJohn Marino   enum rtx_code code = GET_CODE (op);
3101*e4b17023SJohn Marino 
3102*e4b17023SJohn Marino   /* Constants always come the second operand.  Prefer "nice" constants.  */
3103*e4b17023SJohn Marino   /* Constants always become the second operand.  Prefer "nice" constants.  */
3104*e4b17023SJohn Marino     return -8;
3105*e4b17023SJohn Marino   if (code == CONST_DOUBLE)
3106*e4b17023SJohn Marino     return -7;
3107*e4b17023SJohn Marino   if (code == CONST_FIXED)
3108*e4b17023SJohn Marino     return -7;
3109*e4b17023SJohn Marino   op = avoid_constant_pool_reference (op);
3110*e4b17023SJohn Marino   code = GET_CODE (op);
3111*e4b17023SJohn Marino 
3112*e4b17023SJohn Marino   switch (GET_RTX_CLASS (code))
3113*e4b17023SJohn Marino     {
3114*e4b17023SJohn Marino     case RTX_CONST_OBJ:
3115*e4b17023SJohn Marino       if (code == CONST_INT)
3116*e4b17023SJohn Marino         return -6;
3117*e4b17023SJohn Marino       if (code == CONST_DOUBLE)
3118*e4b17023SJohn Marino         return -5;
3119*e4b17023SJohn Marino       if (code == CONST_FIXED)
3120*e4b17023SJohn Marino         return -5;
3121*e4b17023SJohn Marino       return -4;
3122*e4b17023SJohn Marino 
3123*e4b17023SJohn Marino     case RTX_EXTRA:
3124*e4b17023SJohn Marino       /* SUBREGs of objects should come second.  */
3125*e4b17023SJohn Marino       if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3126*e4b17023SJohn Marino         return -3;
3127*e4b17023SJohn Marino       return 0;
3128*e4b17023SJohn Marino 
3129*e4b17023SJohn Marino     case RTX_OBJ:
3130*e4b17023SJohn Marino       /* Complex expressions should come first, so decrease the priority
3131*e4b17023SJohn Marino          of objects.  Prefer pointer objects over non-pointer objects.  */
3132*e4b17023SJohn Marino       if ((REG_P (op) && REG_POINTER (op))
3133*e4b17023SJohn Marino 	  || (MEM_P (op) && MEM_POINTER (op)))
3134*e4b17023SJohn Marino 	return -1;
3135*e4b17023SJohn Marino       return -2;
3136*e4b17023SJohn Marino 
3137*e4b17023SJohn Marino     case RTX_COMM_ARITH:
3138*e4b17023SJohn Marino       /* Prefer operands that are themselves commutative to be first.
3139*e4b17023SJohn Marino          This helps to make things linear.  In particular,
3140*e4b17023SJohn Marino          (and (and (reg) (reg)) (not (reg))) is canonical.  */
3141*e4b17023SJohn Marino       return 4;
3142*e4b17023SJohn Marino 
3143*e4b17023SJohn Marino     case RTX_BIN_ARITH:
3144*e4b17023SJohn Marino       /* If only one operand is a binary expression, it will be the first
3145*e4b17023SJohn Marino          operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
3146*e4b17023SJohn Marino          is canonical, although it will usually be further simplified.  */
3147*e4b17023SJohn Marino       return 2;
3148*e4b17023SJohn Marino 
3149*e4b17023SJohn Marino     case RTX_UNARY:
3150*e4b17023SJohn Marino       /* Then prefer NEG and NOT.  */
3151*e4b17023SJohn Marino       if (code == NEG || code == NOT)
3152*e4b17023SJohn Marino         return 1;
3153*e4b17023SJohn Marino 
3154*e4b17023SJohn Marino     default:
3155*e4b17023SJohn Marino       return 0;
3156*e4b17023SJohn Marino     }
3157*e4b17023SJohn Marino }
3158*e4b17023SJohn Marino 
3159*e4b17023SJohn Marino /* Return 1 iff it is necessary to swap the operands of a commutative
3160*e4b17023SJohn Marino    operation in order to canonicalize the expression.  */
3161*e4b17023SJohn Marino 
3162*e4b17023SJohn Marino bool
3163*e4b17023SJohn Marino swap_commutative_operands_p (rtx x, rtx y)
3164*e4b17023SJohn Marino {
3165*e4b17023SJohn Marino   return (commutative_operand_precedence (x)
3166*e4b17023SJohn Marino 	  < commutative_operand_precedence (y));
3167*e4b17023SJohn Marino }
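
/* Worked example (hypothetical operands): commutative_operand_precedence
   gives a CONST_INT -8 and a plain REG -2, so
   swap_commutative_operands_p (const_int, reg) is true and
   canonicalization of (plus (const_int 4) (reg)) swaps the operands,
   yielding (plus (reg) (const_int 4)).  */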
3168*e4b17023SJohn Marino 
3169*e4b17023SJohn Marino /* Return 1 if X is an autoincrement side effect and the register is
3170*e4b17023SJohn Marino    not the stack pointer.  */
3171*e4b17023SJohn Marino int
3172*e4b17023SJohn Marino auto_inc_p (const_rtx x)
3173*e4b17023SJohn Marino {
3174*e4b17023SJohn Marino   switch (GET_CODE (x))
3175*e4b17023SJohn Marino     {
3176*e4b17023SJohn Marino     case PRE_INC:
3177*e4b17023SJohn Marino     case POST_INC:
3178*e4b17023SJohn Marino     case PRE_DEC:
3179*e4b17023SJohn Marino     case POST_DEC:
3180*e4b17023SJohn Marino     case PRE_MODIFY:
3181*e4b17023SJohn Marino     case POST_MODIFY:
3182*e4b17023SJohn Marino       /* There are no REG_INC notes for SP.  */
3183*e4b17023SJohn Marino       if (XEXP (x, 0) != stack_pointer_rtx)
3184*e4b17023SJohn Marino 	return 1;
3185*e4b17023SJohn Marino     default:
3186*e4b17023SJohn Marino       break;
3187*e4b17023SJohn Marino     }
3188*e4b17023SJohn Marino   return 0;
3189*e4b17023SJohn Marino }
3190*e4b17023SJohn Marino 
3191*e4b17023SJohn Marino /* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
3192*e4b17023SJohn Marino int
3193*e4b17023SJohn Marino loc_mentioned_in_p (rtx *loc, const_rtx in)
3194*e4b17023SJohn Marino {
3195*e4b17023SJohn Marino   enum rtx_code code;
3196*e4b17023SJohn Marino   const char *fmt;
3197*e4b17023SJohn Marino   int i, j;
3198*e4b17023SJohn Marino 
3199*e4b17023SJohn Marino   if (!in)
3200*e4b17023SJohn Marino     return 0;
3201*e4b17023SJohn Marino 
3202*e4b17023SJohn Marino   code = GET_CODE (in);
3203*e4b17023SJohn Marino   fmt = GET_RTX_FORMAT (code);
3204*e4b17023SJohn Marino   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3205*e4b17023SJohn Marino     {
3206*e4b17023SJohn Marino       if (fmt[i] == 'e')
3207*e4b17023SJohn Marino 	{
3208*e4b17023SJohn Marino 	  if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3209*e4b17023SJohn Marino 	    return 1;
3210*e4b17023SJohn Marino 	}
3211*e4b17023SJohn Marino       else if (fmt[i] == 'E')
3212*e4b17023SJohn Marino 	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3213*e4b17023SJohn Marino 	  if (loc == &XVECEXP (in, i, j)
3214*e4b17023SJohn Marino 	      || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3215*e4b17023SJohn Marino 	    return 1;
3216*e4b17023SJohn Marino     }
3217*e4b17023SJohn Marino   return 0;
3218*e4b17023SJohn Marino }
3219*e4b17023SJohn Marino 
3220*e4b17023SJohn Marino /* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
3221*e4b17023SJohn Marino    and SUBREG_BYTE, return the bit offset where the subreg begins
3222*e4b17023SJohn Marino    (counting from the least significant bit of the operand).  */
3223*e4b17023SJohn Marino 
3224*e4b17023SJohn Marino unsigned int
3225*e4b17023SJohn Marino subreg_lsb_1 (enum machine_mode outer_mode,
3226*e4b17023SJohn Marino 	      enum machine_mode inner_mode,
3227*e4b17023SJohn Marino 	      unsigned int subreg_byte)
3228*e4b17023SJohn Marino {
3229*e4b17023SJohn Marino   unsigned int bitpos;
3230*e4b17023SJohn Marino   unsigned int byte;
3231*e4b17023SJohn Marino   unsigned int word;
3232*e4b17023SJohn Marino 
3233*e4b17023SJohn Marino   /* A paradoxical subreg begins at bit position 0.  */
3234*e4b17023SJohn Marino   if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
3235*e4b17023SJohn Marino     return 0;
3236*e4b17023SJohn Marino 
3237*e4b17023SJohn Marino   if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3238*e4b17023SJohn Marino     /* If the subreg crosses a word boundary ensure that
3239*e4b17023SJohn Marino        it also begins and ends on a word boundary.  */
3240*e4b17023SJohn Marino     gcc_assert (!((subreg_byte % UNITS_PER_WORD
3241*e4b17023SJohn Marino 		  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3242*e4b17023SJohn Marino 		  && (subreg_byte % UNITS_PER_WORD
3243*e4b17023SJohn Marino 		      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3244*e4b17023SJohn Marino 
3245*e4b17023SJohn Marino   if (WORDS_BIG_ENDIAN)
3246*e4b17023SJohn Marino     word = (GET_MODE_SIZE (inner_mode)
3247*e4b17023SJohn Marino 	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3248*e4b17023SJohn Marino   else
3249*e4b17023SJohn Marino     word = subreg_byte / UNITS_PER_WORD;
3250*e4b17023SJohn Marino   bitpos = word * BITS_PER_WORD;
3251*e4b17023SJohn Marino 
3252*e4b17023SJohn Marino   if (BYTES_BIG_ENDIAN)
3253*e4b17023SJohn Marino     byte = (GET_MODE_SIZE (inner_mode)
3254*e4b17023SJohn Marino 	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3255*e4b17023SJohn Marino   else
3256*e4b17023SJohn Marino     byte = subreg_byte % UNITS_PER_WORD;
3257*e4b17023SJohn Marino   bitpos += byte * BITS_PER_UNIT;
3258*e4b17023SJohn Marino 
3259*e4b17023SJohn Marino   return bitpos;
3260*e4b17023SJohn Marino }
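
/* Worked example (assuming 8-bit QImode, 32-bit SImode and
   UNITS_PER_WORD == 4): for (subreg:QI (reg:SI) 2), subreg_lsb_1 returns
   bit 16 on a little-endian target (word 0, byte 2) and bit 8 on a
   big-endian target (byte 4 - (2 + 1) = 1 within word 0).  */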
3261*e4b17023SJohn Marino 
3262*e4b17023SJohn Marino /* Given a subreg X, return the bit offset where the subreg begins
3263*e4b17023SJohn Marino    (counting from the least significant bit of the reg).  */
3264*e4b17023SJohn Marino 
3265*e4b17023SJohn Marino unsigned int
3266*e4b17023SJohn Marino subreg_lsb (const_rtx x)
3267*e4b17023SJohn Marino {
3268*e4b17023SJohn Marino   return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3269*e4b17023SJohn Marino 		       SUBREG_BYTE (x));
3270*e4b17023SJohn Marino }
3271*e4b17023SJohn Marino 
3272*e4b17023SJohn Marino /* Fill in information about a subreg of a hard register.
3273*e4b17023SJohn Marino    xregno - A regno of an inner hard subreg_reg (or what will become one).
3274*e4b17023SJohn Marino    xmode  - The mode of xregno.
3275*e4b17023SJohn Marino    offset - The byte offset.
3276*e4b17023SJohn Marino    ymode  - The mode of a top level SUBREG (or what may become one).
3277*e4b17023SJohn Marino    info   - Pointer to structure to fill in.  */
3278*e4b17023SJohn Marino void
3279*e4b17023SJohn Marino subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3280*e4b17023SJohn Marino 		 unsigned int offset, enum machine_mode ymode,
3281*e4b17023SJohn Marino 		 struct subreg_info *info)
3282*e4b17023SJohn Marino {
3283*e4b17023SJohn Marino   int nregs_xmode, nregs_ymode;
3284*e4b17023SJohn Marino   int mode_multiple, nregs_multiple;
3285*e4b17023SJohn Marino   int offset_adj, y_offset, y_offset_adj;
3286*e4b17023SJohn Marino   int regsize_xmode, regsize_ymode;
3287*e4b17023SJohn Marino   bool rknown;
3288*e4b17023SJohn Marino 
3289*e4b17023SJohn Marino   gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3290*e4b17023SJohn Marino 
3291*e4b17023SJohn Marino   rknown = false;
3292*e4b17023SJohn Marino 
3293*e4b17023SJohn Marino   /* If a non-scalar mode has holes when stored in registers, we expect
3294*e4b17023SJohn Marino      it to be made up of its units concatenated together.  */
3295*e4b17023SJohn Marino   if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3296*e4b17023SJohn Marino     {
3297*e4b17023SJohn Marino       enum machine_mode xmode_unit;
3298*e4b17023SJohn Marino 
3299*e4b17023SJohn Marino       nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3300*e4b17023SJohn Marino       if (GET_MODE_INNER (xmode) == VOIDmode)
3301*e4b17023SJohn Marino 	xmode_unit = xmode;
3302*e4b17023SJohn Marino       else
3303*e4b17023SJohn Marino 	xmode_unit = GET_MODE_INNER (xmode);
3304*e4b17023SJohn Marino       gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3305*e4b17023SJohn Marino       gcc_assert (nregs_xmode
3306*e4b17023SJohn Marino 		  == (GET_MODE_NUNITS (xmode)
3307*e4b17023SJohn Marino 		      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3308*e4b17023SJohn Marino       gcc_assert (hard_regno_nregs[xregno][xmode]
3309*e4b17023SJohn Marino 		  == (hard_regno_nregs[xregno][xmode_unit]
3310*e4b17023SJohn Marino 		      * GET_MODE_NUNITS (xmode)));
3311*e4b17023SJohn Marino 
3312*e4b17023SJohn Marino       /* You can only ask for a SUBREG of a value with holes in the middle
3313*e4b17023SJohn Marino 	 if you don't cross the holes.  (Such a SUBREG should be done by
3314*e4b17023SJohn Marino 	 picking a different register class, or doing it in memory if
3315*e4b17023SJohn Marino 	 necessary.)  An example of a value with holes is XCmode on 32-bit
3316*e4b17023SJohn Marino 	 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3317*e4b17023SJohn Marino 	 3 for each part, but in memory it's two 128-bit parts.
3318*e4b17023SJohn Marino 	 Padding is assumed to be at the end (not necessarily the 'high part')
3319*e4b17023SJohn Marino 	 of each unit.  */
3320*e4b17023SJohn Marino       if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3321*e4b17023SJohn Marino 	   < GET_MODE_NUNITS (xmode))
3322*e4b17023SJohn Marino 	  && (offset / GET_MODE_SIZE (xmode_unit)
3323*e4b17023SJohn Marino 	      != ((offset + GET_MODE_SIZE (ymode) - 1)
3324*e4b17023SJohn Marino 		  / GET_MODE_SIZE (xmode_unit))))
3325*e4b17023SJohn Marino 	{
3326*e4b17023SJohn Marino 	  info->representable_p = false;
3327*e4b17023SJohn Marino 	  rknown = true;
3328*e4b17023SJohn Marino 	}
3329*e4b17023SJohn Marino     }
3330*e4b17023SJohn Marino   else
3331*e4b17023SJohn Marino     nregs_xmode = hard_regno_nregs[xregno][xmode];
3332*e4b17023SJohn Marino 
3333*e4b17023SJohn Marino   nregs_ymode = hard_regno_nregs[xregno][ymode];
3334*e4b17023SJohn Marino 
3335*e4b17023SJohn Marino   /* Paradoxical subregs are otherwise valid.  */
3336*e4b17023SJohn Marino   if (!rknown
3337*e4b17023SJohn Marino       && offset == 0
3338*e4b17023SJohn Marino       && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
3339*e4b17023SJohn Marino     {
3340*e4b17023SJohn Marino       info->representable_p = true;
3341*e4b17023SJohn Marino       /* If this is a big endian paradoxical subreg, which uses more
3342*e4b17023SJohn Marino 	 actual hard registers than the original register, we must
3343*e4b17023SJohn Marino 	 return a negative offset so that we find the proper highpart
3344*e4b17023SJohn Marino 	 of the register.  */
3345*e4b17023SJohn Marino       if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3346*e4b17023SJohn Marino 	  ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3347*e4b17023SJohn Marino 	info->offset = nregs_xmode - nregs_ymode;
3348*e4b17023SJohn Marino       else
3349*e4b17023SJohn Marino 	info->offset = 0;
3350*e4b17023SJohn Marino       info->nregs = nregs_ymode;
3351*e4b17023SJohn Marino       return;
3352*e4b17023SJohn Marino     }
3353*e4b17023SJohn Marino 
3354*e4b17023SJohn Marino   /* If registers store different numbers of bits in the different
3355*e4b17023SJohn Marino      modes, we cannot generally form this subreg.  */
3356*e4b17023SJohn Marino   if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3357*e4b17023SJohn Marino       && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3358*e4b17023SJohn Marino       && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3359*e4b17023SJohn Marino       && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3360*e4b17023SJohn Marino     {
3361*e4b17023SJohn Marino       regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3362*e4b17023SJohn Marino       regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3363*e4b17023SJohn Marino       if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3364*e4b17023SJohn Marino 	{
3365*e4b17023SJohn Marino 	  info->representable_p = false;
3366*e4b17023SJohn Marino 	  info->nregs
3367*e4b17023SJohn Marino 	    = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3368*e4b17023SJohn Marino 	  info->offset = offset / regsize_xmode;
3369*e4b17023SJohn Marino 	  return;
3370*e4b17023SJohn Marino 	}
3371*e4b17023SJohn Marino       if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3372*e4b17023SJohn Marino 	{
3373*e4b17023SJohn Marino 	  info->representable_p = false;
3374*e4b17023SJohn Marino 	  info->nregs
3375*e4b17023SJohn Marino 	    = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3376*e4b17023SJohn Marino 	  info->offset = offset / regsize_xmode;
3377*e4b17023SJohn Marino 	  return;
3378*e4b17023SJohn Marino 	}
3379*e4b17023SJohn Marino     }
3380*e4b17023SJohn Marino 
3381*e4b17023SJohn Marino   /* Lowpart subregs are otherwise valid.  */
3382*e4b17023SJohn Marino   if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3383*e4b17023SJohn Marino     {
3384*e4b17023SJohn Marino       info->representable_p = true;
3385*e4b17023SJohn Marino       rknown = true;
3386*e4b17023SJohn Marino 
3387*e4b17023SJohn Marino       if (offset == 0 || nregs_xmode == nregs_ymode)
3388*e4b17023SJohn Marino 	{
3389*e4b17023SJohn Marino 	  info->offset = 0;
3390*e4b17023SJohn Marino 	  info->nregs = nregs_ymode;
3391*e4b17023SJohn Marino 	  return;
3392*e4b17023SJohn Marino 	}
3393*e4b17023SJohn Marino     }
3394*e4b17023SJohn Marino 
3395*e4b17023SJohn Marino   /* This should always pass, otherwise we don't know how to verify
3396*e4b17023SJohn Marino      the constraint.  These conditions may be relaxed but
3397*e4b17023SJohn Marino      subreg_regno_offset would need to be redesigned.  */
3398*e4b17023SJohn Marino   gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3399*e4b17023SJohn Marino   gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3400*e4b17023SJohn Marino 
3401*e4b17023SJohn Marino   if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3402*e4b17023SJohn Marino       && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3403*e4b17023SJohn Marino     {
3404*e4b17023SJohn Marino       HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3405*e4b17023SJohn Marino       HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3406*e4b17023SJohn Marino       HOST_WIDE_INT off_low = offset & (ysize - 1);
3407*e4b17023SJohn Marino       HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3408*e4b17023SJohn Marino       offset = (xsize - ysize - off_high) | off_low;
3409*e4b17023SJohn Marino     }
3410*e4b17023SJohn Marino   /* The XMODE value can be seen as a vector of NREGS_XMODE
3411*e4b17023SJohn Marino      values.  The subreg must represent a lowpart of a given field.
3412*e4b17023SJohn Marino      Compute what field it is.  */
3413*e4b17023SJohn Marino   offset_adj = offset;
3414*e4b17023SJohn Marino   offset_adj -= subreg_lowpart_offset (ymode,
3415*e4b17023SJohn Marino 				       mode_for_size (GET_MODE_BITSIZE (xmode)
3416*e4b17023SJohn Marino 						      / nregs_xmode,
3417*e4b17023SJohn Marino 						      MODE_INT, 0));
3418*e4b17023SJohn Marino 
3419*e4b17023SJohn Marino   /* Size of ymode must not be greater than the size of xmode.  */
3420*e4b17023SJohn Marino   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3421*e4b17023SJohn Marino   gcc_assert (mode_multiple != 0);
3422*e4b17023SJohn Marino 
3423*e4b17023SJohn Marino   y_offset = offset / GET_MODE_SIZE (ymode);
3424*e4b17023SJohn Marino   y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3425*e4b17023SJohn Marino   nregs_multiple = nregs_xmode / nregs_ymode;
3426*e4b17023SJohn Marino 
3427*e4b17023SJohn Marino   gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3428*e4b17023SJohn Marino   gcc_assert ((mode_multiple % nregs_multiple) == 0);
3429*e4b17023SJohn Marino 
3430*e4b17023SJohn Marino   if (!rknown)
3431*e4b17023SJohn Marino     {
3432*e4b17023SJohn Marino       info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3433*e4b17023SJohn Marino       rknown = true;
3434*e4b17023SJohn Marino     }
3435*e4b17023SJohn Marino   info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3436*e4b17023SJohn Marino   info->nregs = nregs_ymode;
3437*e4b17023SJohn Marino }
3438*e4b17023SJohn Marino 
3439*e4b17023SJohn Marino /* This function returns the regno offset of a subreg expression.
3440*e4b17023SJohn Marino    xregno - A regno of an inner hard subreg_reg (or what will become one).
3441*e4b17023SJohn Marino    xmode  - The mode of xregno.
3442*e4b17023SJohn Marino    offset - The byte offset.
3443*e4b17023SJohn Marino    ymode  - The mode of a top level SUBREG (or what may become one).
3444*e4b17023SJohn Marino    RETURN - The regno offset which would be used.  */
3445*e4b17023SJohn Marino unsigned int
3446*e4b17023SJohn Marino subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3447*e4b17023SJohn Marino 		     unsigned int offset, enum machine_mode ymode)
3448*e4b17023SJohn Marino {
3449*e4b17023SJohn Marino   struct subreg_info info;
3450*e4b17023SJohn Marino   subreg_get_info (xregno, xmode, offset, ymode, &info);
3451*e4b17023SJohn Marino   return info.offset;
3452*e4b17023SJohn Marino }
3453*e4b17023SJohn Marino 
3454*e4b17023SJohn Marino /* This function returns true when the offset is representable via
3455*e4b17023SJohn Marino    subreg_offset in the given regno.
3456*e4b17023SJohn Marino    xregno - A regno of an inner hard subreg_reg (or what will become one).
3457*e4b17023SJohn Marino    xmode  - The mode of xregno.
3458*e4b17023SJohn Marino    offset - The byte offset.
3459*e4b17023SJohn Marino    ymode  - The mode of a top level SUBREG (or what may become one).
3460*e4b17023SJohn Marino    RETURN - Whether the offset is representable.  */
3461*e4b17023SJohn Marino bool
3462*e4b17023SJohn Marino subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3463*e4b17023SJohn Marino 			       unsigned int offset, enum machine_mode ymode)
3464*e4b17023SJohn Marino {
3465*e4b17023SJohn Marino   struct subreg_info info;
3466*e4b17023SJohn Marino   subreg_get_info (xregno, xmode, offset, ymode, &info);
3467*e4b17023SJohn Marino   return info.representable_p;
3468*e4b17023SJohn Marino }
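
/* A hypothetical illustration (not part of the original source): callers
   normally check representability before trusting the regno offset
   (XREGNO, XMODE, OFFSET, YMODE and YREGNO are placeholders).  */
#if 0
  if (subreg_offset_representable_p (xregno, xmode, offset, ymode))
    yregno = xregno + subreg_regno_offset (xregno, xmode, offset, ymode);
#endif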
3469*e4b17023SJohn Marino 
3470*e4b17023SJohn Marino /* Return the number of a YMODE register to which
3471*e4b17023SJohn Marino 
3472*e4b17023SJohn Marino        (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3473*e4b17023SJohn Marino 
3474*e4b17023SJohn Marino    can be simplified.  Return -1 if the subreg can't be simplified.
3475*e4b17023SJohn Marino 
3476*e4b17023SJohn Marino    XREGNO is a hard register number.  */
3477*e4b17023SJohn Marino 
3478*e4b17023SJohn Marino int
3479*e4b17023SJohn Marino simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3480*e4b17023SJohn Marino 		       unsigned int offset, enum machine_mode ymode)
3481*e4b17023SJohn Marino {
3482*e4b17023SJohn Marino   struct subreg_info info;
3483*e4b17023SJohn Marino   unsigned int yregno;
3484*e4b17023SJohn Marino 
3485*e4b17023SJohn Marino #ifdef CANNOT_CHANGE_MODE_CLASS
3486*e4b17023SJohn Marino   /* Give the backend a chance to disallow the mode change.  */
3487*e4b17023SJohn Marino   if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3488*e4b17023SJohn Marino       && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3489*e4b17023SJohn Marino       && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
3490*e4b17023SJohn Marino     return -1;
3491*e4b17023SJohn Marino #endif
3492*e4b17023SJohn Marino 
3493*e4b17023SJohn Marino   /* We shouldn't simplify stack-related registers.  */
3494*e4b17023SJohn Marino   if ((!reload_completed || frame_pointer_needed)
3495*e4b17023SJohn Marino       && xregno == FRAME_POINTER_REGNUM)
3496*e4b17023SJohn Marino     return -1;
3497*e4b17023SJohn Marino 
3498*e4b17023SJohn Marino   if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3499*e4b17023SJohn Marino       && xregno == ARG_POINTER_REGNUM)
3500*e4b17023SJohn Marino     return -1;
3501*e4b17023SJohn Marino 
3502*e4b17023SJohn Marino   if (xregno == STACK_POINTER_REGNUM)
3503*e4b17023SJohn Marino     return -1;
3504*e4b17023SJohn Marino 
3505*e4b17023SJohn Marino   /* Try to get the register offset.  */
3506*e4b17023SJohn Marino   subreg_get_info (xregno, xmode, offset, ymode, &info);
3507*e4b17023SJohn Marino   if (!info.representable_p)
3508*e4b17023SJohn Marino     return -1;
3509*e4b17023SJohn Marino 
3510*e4b17023SJohn Marino   /* Make sure that the offsetted register value is in range.  */
3511*e4b17023SJohn Marino   yregno = xregno + info.offset;
3512*e4b17023SJohn Marino   if (!HARD_REGISTER_NUM_P (yregno))
3513*e4b17023SJohn Marino     return -1;
3514*e4b17023SJohn Marino 
3515*e4b17023SJohn Marino   /* See whether (reg:YMODE YREGNO) is valid.
3516*e4b17023SJohn Marino 
3517*e4b17023SJohn Marino      ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3518*e4b17023SJohn Marino      This is a kludge to work around how complex FP arguments are passed
3519*e4b17023SJohn Marino      on IA-64 and should be fixed.  See PR target/49226.  */
3520*e4b17023SJohn Marino   if (!HARD_REGNO_MODE_OK (yregno, ymode)
3521*e4b17023SJohn Marino       && HARD_REGNO_MODE_OK (xregno, xmode))
3522*e4b17023SJohn Marino     return -1;
3523*e4b17023SJohn Marino 
3524*e4b17023SJohn Marino   return (int) yregno;
3525*e4b17023SJohn Marino }
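
/* A hypothetical illustration (not part of the original source): when
   simplify_subreg_regno succeeds, the whole subreg can be replaced by a
   hard register in the outer mode (XREGNO and REPLACEMENT are
   placeholders).  */
#if 0
  int new_regno = simplify_subreg_regno (xregno, DImode, 4, SImode);
  if (new_regno >= 0)
    replacement = gen_rtx_REG (SImode, new_regno);
#endif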
3526*e4b17023SJohn Marino 
3527*e4b17023SJohn Marino /* Return the final regno that a subreg expression refers to.  */
3528*e4b17023SJohn Marino unsigned int
3529*e4b17023SJohn Marino subreg_regno (const_rtx x)
3530*e4b17023SJohn Marino {
3531*e4b17023SJohn Marino   unsigned int ret;
3532*e4b17023SJohn Marino   rtx subreg = SUBREG_REG (x);
3533*e4b17023SJohn Marino   int regno = REGNO (subreg);
3534*e4b17023SJohn Marino 
3535*e4b17023SJohn Marino   ret = regno + subreg_regno_offset (regno,
3536*e4b17023SJohn Marino 				     GET_MODE (subreg),
3537*e4b17023SJohn Marino 				     SUBREG_BYTE (x),
3538*e4b17023SJohn Marino 				     GET_MODE (x));
3539*e4b17023SJohn Marino   return ret;
3540*e4b17023SJohn Marino 
3541*e4b17023SJohn Marino }
3542*e4b17023SJohn Marino 
3543*e4b17023SJohn Marino /* Return the number of registers that a subreg expression refers
3544*e4b17023SJohn Marino    to.  */
3545*e4b17023SJohn Marino unsigned int
3546*e4b17023SJohn Marino subreg_nregs (const_rtx x)
3547*e4b17023SJohn Marino {
3548*e4b17023SJohn Marino   return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3549*e4b17023SJohn Marino }
3550*e4b17023SJohn Marino 
3551*e4b17023SJohn Marino /* Return the number of registers that a subreg REG with REGNO
3552*e4b17023SJohn Marino /* Return the number of registers that the subreg expression X, whose
3553*e4b17023SJohn Marino    inner register has number REGNO, refers to.  This is a copy of
3554*e4b17023SJohn Marino    rtlanal.c:subreg_nregs, changed so that the regno can be passed in.  */
3555*e4b17023SJohn Marino unsigned int
3556*e4b17023SJohn Marino subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3557*e4b17023SJohn Marino {
3558*e4b17023SJohn Marino   struct subreg_info info;
3559*e4b17023SJohn Marino   rtx subreg = SUBREG_REG (x);
3560*e4b17023SJohn Marino 
3561*e4b17023SJohn Marino   subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3562*e4b17023SJohn Marino 		   &info);
3563*e4b17023SJohn Marino   return info.nregs;
3564*e4b17023SJohn Marino }
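
/* A hypothetical illustration (not part of the original source): for a
   SUBREG of a hard register, subreg_regno and subreg_nregs together
   describe the range of hard registers it occupies.  */
#if 0
static void
example_mark_subreg_hard_regs (const_rtx x, HARD_REG_SET *set)
{
  unsigned int first = subreg_regno (x);
  unsigned int n = subreg_nregs (x);
  unsigned int i;

  for (i = 0; i < n; i++)
    SET_HARD_REG_BIT (*set, first + i);
}
#endif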
3565*e4b17023SJohn Marino 
3566*e4b17023SJohn Marino 
3567*e4b17023SJohn Marino struct parms_set_data
3568*e4b17023SJohn Marino {
3569*e4b17023SJohn Marino   int nregs;
3570*e4b17023SJohn Marino   HARD_REG_SET regs;
3571*e4b17023SJohn Marino };
3572*e4b17023SJohn Marino 
3573*e4b17023SJohn Marino /* Helper function for noticing stores to parameter registers.  */
3574*e4b17023SJohn Marino static void
3575*e4b17023SJohn Marino parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3576*e4b17023SJohn Marino {
3577*e4b17023SJohn Marino   struct parms_set_data *const d = (struct parms_set_data *) data;
3578*e4b17023SJohn Marino   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3579*e4b17023SJohn Marino       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3580*e4b17023SJohn Marino     {
3581*e4b17023SJohn Marino       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3582*e4b17023SJohn Marino       d->nregs--;
3583*e4b17023SJohn Marino     }
3584*e4b17023SJohn Marino }
3585*e4b17023SJohn Marino 
3586*e4b17023SJohn Marino /* Look backward for the first parameter to be loaded.
3587*e4b17023SJohn Marino    Note that loads of all parameters will not necessarily be
3588*e4b17023SJohn Marino    found if CSE has eliminated some of them (e.g., an argument
3589*e4b17023SJohn Marino    to the outer function is passed down as a parameter).
3590*e4b17023SJohn Marino    Do not skip BOUNDARY.  */
3591*e4b17023SJohn Marino rtx
3592*e4b17023SJohn Marino find_first_parameter_load (rtx call_insn, rtx boundary)
3593*e4b17023SJohn Marino {
3594*e4b17023SJohn Marino   struct parms_set_data parm;
3595*e4b17023SJohn Marino   rtx p, before, first_set;
3596*e4b17023SJohn Marino 
3597*e4b17023SJohn Marino   /* Since different machines initialize their parameter registers
3598*e4b17023SJohn Marino      in different orders, assume nothing.  Collect the set of all
3599*e4b17023SJohn Marino      parameter registers.  */
3600*e4b17023SJohn Marino   CLEAR_HARD_REG_SET (parm.regs);
3601*e4b17023SJohn Marino   parm.nregs = 0;
3602*e4b17023SJohn Marino   for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3603*e4b17023SJohn Marino     if (GET_CODE (XEXP (p, 0)) == USE
3604*e4b17023SJohn Marino 	&& REG_P (XEXP (XEXP (p, 0), 0)))
3605*e4b17023SJohn Marino       {
3606*e4b17023SJohn Marino 	gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3607*e4b17023SJohn Marino 
3608*e4b17023SJohn Marino 	/* We only care about registers which can hold function
3609*e4b17023SJohn Marino 	   arguments.  */
3610*e4b17023SJohn Marino 	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3611*e4b17023SJohn Marino 	  continue;
3612*e4b17023SJohn Marino 
3613*e4b17023SJohn Marino 	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3614*e4b17023SJohn Marino 	parm.nregs++;
3615*e4b17023SJohn Marino       }
3616*e4b17023SJohn Marino   before = call_insn;
3617*e4b17023SJohn Marino   first_set = call_insn;
3618*e4b17023SJohn Marino 
3619*e4b17023SJohn Marino   /* Search backward for the first set of a register in this set.  */
3620*e4b17023SJohn Marino   while (parm.nregs && before != boundary)
3621*e4b17023SJohn Marino     {
3622*e4b17023SJohn Marino       before = PREV_INSN (before);
3623*e4b17023SJohn Marino 
3624*e4b17023SJohn Marino       /* It is possible that some loads got CSEed from one call to
3625*e4b17023SJohn Marino          another.  Stop in that case.  */
3626*e4b17023SJohn Marino       if (CALL_P (before))
3627*e4b17023SJohn Marino 	break;
3628*e4b17023SJohn Marino 
3629*e4b17023SJohn Marino       /* Our caller must either ensure that we will find all sets
3630*e4b17023SJohn Marino          (in case code has not been optimized yet), or take care of
3631*e4b17023SJohn Marino          possible labels by setting BOUNDARY to the preceding
3632*e4b17023SJohn Marino          CODE_LABEL.  */
3633*e4b17023SJohn Marino       if (LABEL_P (before))
3634*e4b17023SJohn Marino 	{
3635*e4b17023SJohn Marino 	  gcc_assert (before == boundary);
3636*e4b17023SJohn Marino 	  break;
3637*e4b17023SJohn Marino 	}
3638*e4b17023SJohn Marino 
3639*e4b17023SJohn Marino       if (INSN_P (before))
3640*e4b17023SJohn Marino 	{
3641*e4b17023SJohn Marino 	  int nregs_old = parm.nregs;
3642*e4b17023SJohn Marino 	  note_stores (PATTERN (before), parms_set, &parm);
3643*e4b17023SJohn Marino 	  /* If we found something that did not set a parameter reg,
3644*e4b17023SJohn Marino 	     we're done.  Do not keep going, as that might result
3645*e4b17023SJohn Marino 	     in hoisting an insn before the setting of a pseudo
3646*e4b17023SJohn Marino 	     that is used by the hoisted insn. */
3647*e4b17023SJohn Marino 	  if (nregs_old != parm.nregs)
3648*e4b17023SJohn Marino 	    first_set = before;
3649*e4b17023SJohn Marino 	  else
3650*e4b17023SJohn Marino 	    break;
3651*e4b17023SJohn Marino 	}
3652*e4b17023SJohn Marino     }
3653*e4b17023SJohn Marino   return first_set;
3654*e4b17023SJohn Marino }
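
/* A hypothetical illustration (not part of the original source): a pass
   that wants to insert code before CALL_INSN, but not between the call
   and the loads of its argument registers, might do roughly this.
   BB_HEAD, BLOCK_FOR_INSN and emit_insn_before are assumed from the rest
   of the RTL middle end; SEQ is a placeholder.  */
#if 0
  rtx boundary = BB_HEAD (BLOCK_FOR_INSN (call_insn));
  rtx first = find_first_parameter_load (call_insn, boundary);
  emit_insn_before (seq, first);
#endif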
3655*e4b17023SJohn Marino 
3656*e4b17023SJohn Marino /* Return true if we should avoid inserting code between INSN and preceding
3657*e4b17023SJohn Marino    call instruction.  */
3658*e4b17023SJohn Marino 
3659*e4b17023SJohn Marino bool
3660*e4b17023SJohn Marino keep_with_call_p (const_rtx insn)
3661*e4b17023SJohn Marino {
3662*e4b17023SJohn Marino   rtx set;
3663*e4b17023SJohn Marino 
3664*e4b17023SJohn Marino   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3665*e4b17023SJohn Marino     {
3666*e4b17023SJohn Marino       if (REG_P (SET_DEST (set))
3667*e4b17023SJohn Marino 	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3668*e4b17023SJohn Marino 	  && fixed_regs[REGNO (SET_DEST (set))]
3669*e4b17023SJohn Marino 	  && general_operand (SET_SRC (set), VOIDmode))
3670*e4b17023SJohn Marino 	return true;
3671*e4b17023SJohn Marino       if (REG_P (SET_SRC (set))
3672*e4b17023SJohn Marino 	  && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
3673*e4b17023SJohn Marino 	  && REG_P (SET_DEST (set))
3674*e4b17023SJohn Marino 	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3675*e4b17023SJohn Marino 	return true;
3676*e4b17023SJohn Marino       /* There may be a stack pop just after the call and before the store
3677*e4b17023SJohn Marino 	 of the return register.  Search for the actual store when deciding
3678*e4b17023SJohn Marino 	 if we can break or not.  */
3679*e4b17023SJohn Marino       if (SET_DEST (set) == stack_pointer_rtx)
3680*e4b17023SJohn Marino 	{
3681*e4b17023SJohn Marino 	  /* This CONST_CAST is okay because next_nonnote_insn just
3682*e4b17023SJohn Marino 	     returns its argument and we assign it to a const_rtx
3683*e4b17023SJohn Marino 	     variable.  */
3684*e4b17023SJohn Marino 	  const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3685*e4b17023SJohn Marino 	  if (i2 && keep_with_call_p (i2))
3686*e4b17023SJohn Marino 	    return true;
3687*e4b17023SJohn Marino 	}
3688*e4b17023SJohn Marino     }
3689*e4b17023SJohn Marino   return false;
3690*e4b17023SJohn Marino }
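
/* A hypothetical illustration (not part of the original source): code
   that wants to split the insn stream after CALL_INSN would first step
   over the insns that must stay glued to the call.  */
#if 0
  rtx insn = NEXT_INSN (call_insn);
  while (insn != NULL_RTX && keep_with_call_p (insn))
    insn = NEXT_INSN (insn);
  /* INSN is now the first insn that may be separated from the call.  */
#endif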
3691*e4b17023SJohn Marino 
3692*e4b17023SJohn Marino /* Return true if LABEL is a target of JUMP_INSN.  This applies only
3693*e4b17023SJohn Marino    to non-complex jumps.  That is, direct unconditional, conditional,
3694*e4b17023SJohn Marino    and tablejumps, but not computed jumps or returns.  It also does
3695*e4b17023SJohn Marino    not apply to the fallthru case of a conditional jump.  */
3696*e4b17023SJohn Marino 
3697*e4b17023SJohn Marino bool
3698*e4b17023SJohn Marino label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3699*e4b17023SJohn Marino {
3700*e4b17023SJohn Marino   rtx tmp = JUMP_LABEL (jump_insn);
3701*e4b17023SJohn Marino 
3702*e4b17023SJohn Marino   if (label == tmp)
3703*e4b17023SJohn Marino     return true;
3704*e4b17023SJohn Marino 
3705*e4b17023SJohn Marino   if (tablejump_p (jump_insn, NULL, &tmp))
3706*e4b17023SJohn Marino     {
3707*e4b17023SJohn Marino       rtvec vec = XVEC (PATTERN (tmp),
3708*e4b17023SJohn Marino 			GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3709*e4b17023SJohn Marino       int i, veclen = GET_NUM_ELEM (vec);
3710*e4b17023SJohn Marino 
3711*e4b17023SJohn Marino       for (i = 0; i < veclen; ++i)
3712*e4b17023SJohn Marino 	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3713*e4b17023SJohn Marino 	  return true;
3714*e4b17023SJohn Marino     }
3715*e4b17023SJohn Marino 
3716*e4b17023SJohn Marino   if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3717*e4b17023SJohn Marino     return true;
3718*e4b17023SJohn Marino 
3719*e4b17023SJohn Marino   return false;
3720*e4b17023SJohn Marino }
3721*e4b17023SJohn Marino 
3722*e4b17023SJohn Marino 
3723*e4b17023SJohn Marino /* Return an estimate of the cost of computing rtx X.
3724*e4b17023SJohn Marino    One use is in cse, to decide which expression to keep in the hash table.
3725*e4b17023SJohn Marino    Another is in rtl generation, to pick the cheapest way to multiply.
3726*e4b17023SJohn Marino    Other uses like the latter are expected in the future.
3727*e4b17023SJohn Marino 
3728*e4b17023SJohn Marino    X appears as operand OPNO in an expression with code OUTER_CODE.
3729*e4b17023SJohn Marino    SPEED specifies whether costs optimized for speed or size should
3730*e4b17023SJohn Marino    be returned.  */
3731*e4b17023SJohn Marino 
3732*e4b17023SJohn Marino int
3733*e4b17023SJohn Marino rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
3734*e4b17023SJohn Marino {
3735*e4b17023SJohn Marino   int i, j;
3736*e4b17023SJohn Marino   enum rtx_code code;
3737*e4b17023SJohn Marino   const char *fmt;
3738*e4b17023SJohn Marino   int total;
3739*e4b17023SJohn Marino 
3740*e4b17023SJohn Marino   if (x == 0)
3741*e4b17023SJohn Marino     return 0;
3742*e4b17023SJohn Marino 
3743*e4b17023SJohn Marino   /* Compute the default costs of certain things.
3744*e4b17023SJohn Marino      Note that targetm.rtx_costs can override the defaults.  */
3745*e4b17023SJohn Marino 
3746*e4b17023SJohn Marino   code = GET_CODE (x);
3747*e4b17023SJohn Marino   switch (code)
3748*e4b17023SJohn Marino     {
3749*e4b17023SJohn Marino     case MULT:
3750*e4b17023SJohn Marino       total = COSTS_N_INSNS (5);
3751*e4b17023SJohn Marino       break;
3752*e4b17023SJohn Marino     case DIV:
3753*e4b17023SJohn Marino     case UDIV:
3754*e4b17023SJohn Marino     case MOD:
3755*e4b17023SJohn Marino     case UMOD:
3756*e4b17023SJohn Marino       total = COSTS_N_INSNS (7);
3757*e4b17023SJohn Marino       break;
3758*e4b17023SJohn Marino     case USE:
3759*e4b17023SJohn Marino       /* Used in combine.c as a marker.  */
3760*e4b17023SJohn Marino       total = 0;
3761*e4b17023SJohn Marino       break;
3762*e4b17023SJohn Marino     default:
3763*e4b17023SJohn Marino       total = COSTS_N_INSNS (1);
3764*e4b17023SJohn Marino     }
3765*e4b17023SJohn Marino 
3766*e4b17023SJohn Marino   switch (code)
3767*e4b17023SJohn Marino     {
3768*e4b17023SJohn Marino     case REG:
3769*e4b17023SJohn Marino       return 0;
3770*e4b17023SJohn Marino 
3771*e4b17023SJohn Marino     case SUBREG:
3772*e4b17023SJohn Marino       total = 0;
3773*e4b17023SJohn Marino       /* If we can't tie these modes, make this expensive.  The larger
3774*e4b17023SJohn Marino 	 the mode, the more expensive it is.  */
3775*e4b17023SJohn Marino       if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3776*e4b17023SJohn Marino 	return COSTS_N_INSNS (2
3777*e4b17023SJohn Marino 			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3778*e4b17023SJohn Marino       break;
3779*e4b17023SJohn Marino 
3780*e4b17023SJohn Marino     default:
3781*e4b17023SJohn Marino       if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
3782*e4b17023SJohn Marino 	return total;
3783*e4b17023SJohn Marino       break;
3784*e4b17023SJohn Marino     }
3785*e4b17023SJohn Marino 
3786*e4b17023SJohn Marino   /* Sum the costs of the sub-rtx's, plus cost of this operation,
3787*e4b17023SJohn Marino      which is already in total.  */
3788*e4b17023SJohn Marino 
3789*e4b17023SJohn Marino   fmt = GET_RTX_FORMAT (code);
3790*e4b17023SJohn Marino   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3791*e4b17023SJohn Marino     if (fmt[i] == 'e')
3792*e4b17023SJohn Marino       total += rtx_cost (XEXP (x, i), code, i, speed);
3793*e4b17023SJohn Marino     else if (fmt[i] == 'E')
3794*e4b17023SJohn Marino       for (j = 0; j < XVECLEN (x, i); j++)
3795*e4b17023SJohn Marino 	total += rtx_cost (XVECEXP (x, i, j), code, i, speed);
3796*e4b17023SJohn Marino 
3797*e4b17023SJohn Marino   return total;
3798*e4b17023SJohn Marino }
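
/* A hypothetical illustration (not part of the original source): callers
   typically use rtx_cost to pick the cheaper of two candidate
   expressions for the same value, here in a speed-optimized context.  */
#if 0
static rtx
example_pick_cheaper (rtx a, rtx b)
{
  return (rtx_cost (a, SET, 1, true) <= rtx_cost (b, SET, 1, true)) ? a : b;
}
#endif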
3799*e4b17023SJohn Marino 
3800*e4b17023SJohn Marino /* Fill in the structure C with information about both speed and size rtx
3801*e4b17023SJohn Marino    costs for X, which is operand OPNO in an expression with code OUTER.  */
3802*e4b17023SJohn Marino 
3803*e4b17023SJohn Marino void
3804*e4b17023SJohn Marino get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
3805*e4b17023SJohn Marino 		   struct full_rtx_costs *c)
3806*e4b17023SJohn Marino {
3807*e4b17023SJohn Marino   c->speed = rtx_cost (x, outer, opno, true);
3808*e4b17023SJohn Marino   c->size = rtx_cost (x, outer, opno, false);
3809*e4b17023SJohn Marino }
3810*e4b17023SJohn Marino 
3811*e4b17023SJohn Marino 
3812*e4b17023SJohn Marino /* Return the cost of address expression X.
3813*e4b17023SJohn Marino    Expect that X is a properly formed address reference.
3814*e4b17023SJohn Marino 
3815*e4b17023SJohn Marino    The SPEED parameter specifies whether costs optimized for speed or size
3816*e4b17023SJohn Marino    should be returned.  */
3817*e4b17023SJohn Marino 
3818*e4b17023SJohn Marino int
3819*e4b17023SJohn Marino address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
3820*e4b17023SJohn Marino {
3821*e4b17023SJohn Marino   /* We may be asked for the cost of various unusual addresses, such as the
3822*e4b17023SJohn Marino      operands of a push instruction.  It is not worthwhile to complicate
3823*e4b17023SJohn Marino      the target hook for such cases.  */
3824*e4b17023SJohn Marino 
3825*e4b17023SJohn Marino   if (!memory_address_addr_space_p (mode, x, as))
3826*e4b17023SJohn Marino     return 1000;
3827*e4b17023SJohn Marino 
3828*e4b17023SJohn Marino   return targetm.address_cost (x, speed);
3829*e4b17023SJohn Marino }
3830*e4b17023SJohn Marino 
3831*e4b17023SJohn Marino /* If the target doesn't override, compute the cost as with arithmetic.  */
3832*e4b17023SJohn Marino 
3833*e4b17023SJohn Marino int
3834*e4b17023SJohn Marino default_address_cost (rtx x, bool speed)
3835*e4b17023SJohn Marino {
3836*e4b17023SJohn Marino   return rtx_cost (x, MEM, 0, speed);
3837*e4b17023SJohn Marino }
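
/* A hypothetical illustration (not part of the original source):
   comparing two candidate addresses for MODE in the generic address
   space (ADDR_A, ADDR_B, MODE and FOR_SPEED are placeholders).  */
#if 0
  if (address_cost (addr_a, mode, ADDR_SPACE_GENERIC, for_speed)
      <= address_cost (addr_b, mode, ADDR_SPACE_GENERIC, for_speed))
    /* Prefer ADDR_A.  */;
#endif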
3838*e4b17023SJohn Marino 
3839*e4b17023SJohn Marino 
3840*e4b17023SJohn Marino unsigned HOST_WIDE_INT
3841*e4b17023SJohn Marino nonzero_bits (const_rtx x, enum machine_mode mode)
3842*e4b17023SJohn Marino {
3843*e4b17023SJohn Marino   return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3844*e4b17023SJohn Marino }
3845*e4b17023SJohn Marino 
3846*e4b17023SJohn Marino unsigned int
3847*e4b17023SJohn Marino num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3848*e4b17023SJohn Marino {
3849*e4b17023SJohn Marino   return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3850*e4b17023SJohn Marino }
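
/* A hypothetical illustration (not part of the original source): a
   typical query built on nonzero_bits, proving that X always fits in an
   unsigned byte when interpreted in MODE.  */
#if 0
static bool
example_fits_in_unsigned_byte (const_rtx x, enum machine_mode mode)
{
  return (nonzero_bits (x, mode) & ~(unsigned HOST_WIDE_INT) 0xff) == 0;
}
#endif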
3851*e4b17023SJohn Marino 
3852*e4b17023SJohn Marino /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3853*e4b17023SJohn Marino    It avoids exponential behavior in nonzero_bits1 when X has
3854*e4b17023SJohn Marino    identical subexpressions on the first or the second level.  */
3855*e4b17023SJohn Marino 
3856*e4b17023SJohn Marino static unsigned HOST_WIDE_INT
3857*e4b17023SJohn Marino cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3858*e4b17023SJohn Marino 		     enum machine_mode known_mode,
3859*e4b17023SJohn Marino 		     unsigned HOST_WIDE_INT known_ret)
3860*e4b17023SJohn Marino {
3861*e4b17023SJohn Marino   if (x == known_x && mode == known_mode)
3862*e4b17023SJohn Marino     return known_ret;
3863*e4b17023SJohn Marino 
3864*e4b17023SJohn Marino   /* Try to find identical subexpressions.  If found call
3865*e4b17023SJohn Marino      nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3866*e4b17023SJohn Marino      precomputed value for the subexpression as KNOWN_RET.  */
3867*e4b17023SJohn Marino 
3868*e4b17023SJohn Marino   if (ARITHMETIC_P (x))
3869*e4b17023SJohn Marino     {
3870*e4b17023SJohn Marino       rtx x0 = XEXP (x, 0);
3871*e4b17023SJohn Marino       rtx x1 = XEXP (x, 1);
3872*e4b17023SJohn Marino 
3873*e4b17023SJohn Marino       /* Check the first level.  */
3874*e4b17023SJohn Marino       if (x0 == x1)
3875*e4b17023SJohn Marino 	return nonzero_bits1 (x, mode, x0, mode,
3876*e4b17023SJohn Marino 			      cached_nonzero_bits (x0, mode, known_x,
3877*e4b17023SJohn Marino 						   known_mode, known_ret));
3878*e4b17023SJohn Marino 
3879*e4b17023SJohn Marino       /* Check the second level.  */
3880*e4b17023SJohn Marino       if (ARITHMETIC_P (x0)
3881*e4b17023SJohn Marino 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3882*e4b17023SJohn Marino 	return nonzero_bits1 (x, mode, x1, mode,
3883*e4b17023SJohn Marino 			      cached_nonzero_bits (x1, mode, known_x,
3884*e4b17023SJohn Marino 						   known_mode, known_ret));
3885*e4b17023SJohn Marino 
3886*e4b17023SJohn Marino       if (ARITHMETIC_P (x1)
3887*e4b17023SJohn Marino 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3888*e4b17023SJohn Marino 	return nonzero_bits1 (x, mode, x0, mode,
3889*e4b17023SJohn Marino 			      cached_nonzero_bits (x0, mode, known_x,
3890*e4b17023SJohn Marino 						   known_mode, known_ret));
3891*e4b17023SJohn Marino     }
3892*e4b17023SJohn Marino 
3893*e4b17023SJohn Marino   return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3894*e4b17023SJohn Marino }
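
/* A hypothetical illustration (not part of the original source): for
   (plus:SI (reg:SI r) (reg:SI r)) both operands are the same rtx, so the
   wrapper above computes the register's nonzero bits once and hands the
   result to nonzero_bits1 as KNOWN_X/KNOWN_RET rather than recomputing
   it for each occurrence.  */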
3895*e4b17023SJohn Marino 
3896*e4b17023SJohn Marino /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3897*e4b17023SJohn Marino    We don't let nonzero_bits recur into num_sign_bit_copies, because that
3898*e4b17023SJohn Marino    is less useful.  We can't allow both, because that results in exponential
3899*e4b17023SJohn Marino    run time recursion.  There is a nullstone testcase that triggered
3900*e4b17023SJohn Marino    this.  This macro avoids accidental uses of num_sign_bit_copies.  */
3901*e4b17023SJohn Marino #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3902*e4b17023SJohn Marino 
3903*e4b17023SJohn Marino /* Given an expression, X, compute which bits in X can be nonzero.
3904*e4b17023SJohn Marino    We don't care about bits outside of those defined in MODE.
3905*e4b17023SJohn Marino 
3906*e4b17023SJohn Marino    For most X this is simply GET_MODE_MASK (MODE), but if X is
3907*e4b17023SJohn Marino    an arithmetic operation, we can do better.  */
3908*e4b17023SJohn Marino 
3909*e4b17023SJohn Marino static unsigned HOST_WIDE_INT
3910*e4b17023SJohn Marino nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3911*e4b17023SJohn Marino 	       enum machine_mode known_mode,
3912*e4b17023SJohn Marino 	       unsigned HOST_WIDE_INT known_ret)
3913*e4b17023SJohn Marino {
3914*e4b17023SJohn Marino   unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3915*e4b17023SJohn Marino   unsigned HOST_WIDE_INT inner_nz;
3916*e4b17023SJohn Marino   enum rtx_code code;
3917*e4b17023SJohn Marino   enum machine_mode inner_mode;
3918*e4b17023SJohn Marino   unsigned int mode_width = GET_MODE_PRECISION (mode);
3919*e4b17023SJohn Marino 
3920*e4b17023SJohn Marino   /* For floating-point and vector values, assume all bits are needed.  */
3921*e4b17023SJohn Marino   if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3922*e4b17023SJohn Marino       || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
3923*e4b17023SJohn Marino     return nonzero;
3924*e4b17023SJohn Marino 
3925*e4b17023SJohn Marino   /* If X is wider than MODE, use its mode instead.  */
3926*e4b17023SJohn Marino   if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
3927*e4b17023SJohn Marino     {
3928*e4b17023SJohn Marino       mode = GET_MODE (x);
3929*e4b17023SJohn Marino       nonzero = GET_MODE_MASK (mode);
3930*e4b17023SJohn Marino       mode_width = GET_MODE_PRECISION (mode);
3931*e4b17023SJohn Marino     }
3932*e4b17023SJohn Marino 
3933*e4b17023SJohn Marino   if (mode_width > HOST_BITS_PER_WIDE_INT)
3934*e4b17023SJohn Marino     /* Our only callers in this case look for single bit values.  So
3935*e4b17023SJohn Marino        just return the mode mask.  Those tests will then be false.  */
3936*e4b17023SJohn Marino     return nonzero;
3937*e4b17023SJohn Marino 
3938*e4b17023SJohn Marino #ifndef WORD_REGISTER_OPERATIONS
3939*e4b17023SJohn Marino   /* If MODE is wider than X, but both are a single word for both the host
3940*e4b17023SJohn Marino      and target machines, we can compute this from which bits of the
3941*e4b17023SJohn Marino      object might be nonzero in its own mode, taking into account the fact
3942*e4b17023SJohn Marino      that on many CISC machines, accessing an object in a wider mode
3943*e4b17023SJohn Marino      causes the high-order bits to become undefined.  So they are
3944*e4b17023SJohn Marino      not known to be zero.  */
3945*e4b17023SJohn Marino 
3946*e4b17023SJohn Marino   if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3947*e4b17023SJohn Marino       && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
3948*e4b17023SJohn Marino       && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3949*e4b17023SJohn Marino       && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
3950*e4b17023SJohn Marino     {
3951*e4b17023SJohn Marino       nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3952*e4b17023SJohn Marino 				      known_x, known_mode, known_ret);
3953*e4b17023SJohn Marino       nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3954*e4b17023SJohn Marino       return nonzero;
3955*e4b17023SJohn Marino     }
3956*e4b17023SJohn Marino #endif
3957*e4b17023SJohn Marino 
3958*e4b17023SJohn Marino   code = GET_CODE (x);
3959*e4b17023SJohn Marino   switch (code)
3960*e4b17023SJohn Marino     {
3961*e4b17023SJohn Marino     case REG:
3962*e4b17023SJohn Marino #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3963*e4b17023SJohn Marino       /* If pointers extend unsigned and this is a pointer in Pmode, say that
3964*e4b17023SJohn Marino 	 all the bits above ptr_mode are known to be zero.  */
3965*e4b17023SJohn Marino       /* As we do not know which address space the pointer is referring to,
3966*e4b17023SJohn Marino 	 we can do this only if the target does not support different pointer
3967*e4b17023SJohn Marino 	 or address modes depending on the address space.  */
3968*e4b17023SJohn Marino       if (target_default_pointer_address_modes_p ()
3969*e4b17023SJohn Marino 	  && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3970*e4b17023SJohn Marino 	  && REG_POINTER (x))
3971*e4b17023SJohn Marino 	nonzero &= GET_MODE_MASK (ptr_mode);
3972*e4b17023SJohn Marino #endif
3973*e4b17023SJohn Marino 
3974*e4b17023SJohn Marino       /* Include declared information about alignment of pointers.  */
3975*e4b17023SJohn Marino       /* ??? We don't properly preserve REG_POINTER changes across
3976*e4b17023SJohn Marino 	 pointer-to-integer casts, so we can't trust it except for
3977*e4b17023SJohn Marino 	 things that we know must be pointers.  See execute/960116-1.c.  */
3978*e4b17023SJohn Marino       if ((x == stack_pointer_rtx
3979*e4b17023SJohn Marino 	   || x == frame_pointer_rtx
3980*e4b17023SJohn Marino 	   || x == arg_pointer_rtx)
3981*e4b17023SJohn Marino 	  && REGNO_POINTER_ALIGN (REGNO (x)))
3982*e4b17023SJohn Marino 	{
3983*e4b17023SJohn Marino 	  unsigned HOST_WIDE_INT alignment
3984*e4b17023SJohn Marino 	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3985*e4b17023SJohn Marino 
3986*e4b17023SJohn Marino #ifdef PUSH_ROUNDING
3987*e4b17023SJohn Marino 	  /* If PUSH_ROUNDING is defined, it is possible for the
3988*e4b17023SJohn Marino 	     stack to be momentarily aligned only to that amount,
3989*e4b17023SJohn Marino 	     so we pick the least alignment.  */
3990*e4b17023SJohn Marino 	  if (x == stack_pointer_rtx && PUSH_ARGS)
3991*e4b17023SJohn Marino 	    alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3992*e4b17023SJohn Marino 			     alignment);
3993*e4b17023SJohn Marino #endif
3994*e4b17023SJohn Marino 
3995*e4b17023SJohn Marino 	  nonzero &= ~(alignment - 1);
3996*e4b17023SJohn Marino 	}
3997*e4b17023SJohn Marino 
3998*e4b17023SJohn Marino       {
3999*e4b17023SJohn Marino 	unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4000*e4b17023SJohn Marino 	rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4001*e4b17023SJohn Marino 					      known_mode, known_ret,
4002*e4b17023SJohn Marino 					      &nonzero_for_hook);
4003*e4b17023SJohn Marino 
4004*e4b17023SJohn Marino 	if (new_rtx)
4005*e4b17023SJohn Marino 	  nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4006*e4b17023SJohn Marino 						   known_mode, known_ret);
4007*e4b17023SJohn Marino 
4008*e4b17023SJohn Marino 	return nonzero_for_hook;
4009*e4b17023SJohn Marino       }
4010*e4b17023SJohn Marino 
4011*e4b17023SJohn Marino     case CONST_INT:
4012*e4b17023SJohn Marino #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4013*e4b17023SJohn Marino       /* If X is negative in MODE, sign-extend the value.  */
4014*e4b17023SJohn Marino       if (INTVAL (x) > 0
4015*e4b17023SJohn Marino 	  && mode_width < BITS_PER_WORD
4016*e4b17023SJohn Marino 	  && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4017*e4b17023SJohn Marino 	     != 0)
4018*e4b17023SJohn Marino 	return UINTVAL (x) | ((unsigned HOST_WIDE_INT) (-1) << mode_width);
4019*e4b17023SJohn Marino #endif
4020*e4b17023SJohn Marino 
4021*e4b17023SJohn Marino       return UINTVAL (x);
4022*e4b17023SJohn Marino 
4023*e4b17023SJohn Marino     case MEM:
4024*e4b17023SJohn Marino #ifdef LOAD_EXTEND_OP
4025*e4b17023SJohn Marino       /* In many, if not most, RISC machines, reading a byte from memory
4026*e4b17023SJohn Marino 	 zeros the rest of the register.  Noticing that fact saves a lot
4027*e4b17023SJohn Marino 	 of extra zero-extends.  */
4028*e4b17023SJohn Marino       if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4029*e4b17023SJohn Marino 	nonzero &= GET_MODE_MASK (GET_MODE (x));
4030*e4b17023SJohn Marino #endif
4031*e4b17023SJohn Marino       break;
4032*e4b17023SJohn Marino 
4033*e4b17023SJohn Marino     case EQ:  case NE:
4034*e4b17023SJohn Marino     case UNEQ:  case LTGT:
4035*e4b17023SJohn Marino     case GT:  case GTU:  case UNGT:
4036*e4b17023SJohn Marino     case LT:  case LTU:  case UNLT:
4037*e4b17023SJohn Marino     case GE:  case GEU:  case UNGE:
4038*e4b17023SJohn Marino     case LE:  case LEU:  case UNLE:
4039*e4b17023SJohn Marino     case UNORDERED: case ORDERED:
4040*e4b17023SJohn Marino       /* If this produces an integer result, we know which bits are set.
4041*e4b17023SJohn Marino 	 Code here used to clear bits outside the mode of X, but that is
4042*e4b17023SJohn Marino 	 now done above.  */
4043*e4b17023SJohn Marino       /* Mind that MODE is the mode the caller wants to look at this
4044*e4b17023SJohn Marino 	 operation in, and not the actual operation mode.  We can wind
4045*e4b17023SJohn Marino 	 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4046*e4b17023SJohn Marino 	 that describes the results of a vector compare.  */
4047*e4b17023SJohn Marino       if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
4048*e4b17023SJohn Marino 	  && mode_width <= HOST_BITS_PER_WIDE_INT)
4049*e4b17023SJohn Marino 	nonzero = STORE_FLAG_VALUE;
4050*e4b17023SJohn Marino       break;
4051*e4b17023SJohn Marino 
4052*e4b17023SJohn Marino     case NEG:
4053*e4b17023SJohn Marino #if 0
4054*e4b17023SJohn Marino       /* Disabled to avoid exponential mutual recursion between nonzero_bits
4055*e4b17023SJohn Marino 	 and num_sign_bit_copies.  */
4056*e4b17023SJohn Marino       if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4057*e4b17023SJohn Marino 	  == GET_MODE_PRECISION (GET_MODE (x)))
4058*e4b17023SJohn Marino 	nonzero = 1;
4059*e4b17023SJohn Marino #endif
4060*e4b17023SJohn Marino 
4061*e4b17023SJohn Marino       if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
4062*e4b17023SJohn Marino 	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4063*e4b17023SJohn Marino       break;
4064*e4b17023SJohn Marino 
4065*e4b17023SJohn Marino     case ABS:
4066*e4b17023SJohn Marino #if 0
4067*e4b17023SJohn Marino       /* Disabled to avoid exponential mutual recursion between nonzero_bits
4068*e4b17023SJohn Marino 	 and num_sign_bit_copies.  */
4069*e4b17023SJohn Marino       if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4070*e4b17023SJohn Marino 	  == GET_MODE_PRECISION (GET_MODE (x)))
4071*e4b17023SJohn Marino 	nonzero = 1;
4072*e4b17023SJohn Marino #endif
4073*e4b17023SJohn Marino       break;
4074*e4b17023SJohn Marino 
4075*e4b17023SJohn Marino     case TRUNCATE:
4076*e4b17023SJohn Marino       nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4077*e4b17023SJohn Marino 				       known_x, known_mode, known_ret)
4078*e4b17023SJohn Marino 		  & GET_MODE_MASK (mode));
4079*e4b17023SJohn Marino       break;
4080*e4b17023SJohn Marino 
4081*e4b17023SJohn Marino     case ZERO_EXTEND:
4082*e4b17023SJohn Marino       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4083*e4b17023SJohn Marino 				      known_x, known_mode, known_ret);
4084*e4b17023SJohn Marino       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4085*e4b17023SJohn Marino 	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4086*e4b17023SJohn Marino       break;
4087*e4b17023SJohn Marino 
4088*e4b17023SJohn Marino     case SIGN_EXTEND:
4089*e4b17023SJohn Marino       /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4090*e4b17023SJohn Marino 	 Otherwise, show all the bits in the outer mode but not the inner
4091*e4b17023SJohn Marino 	 may be nonzero.  */
4092*e4b17023SJohn Marino       inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4093*e4b17023SJohn Marino 				      known_x, known_mode, known_ret);
4094*e4b17023SJohn Marino       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4095*e4b17023SJohn Marino 	{
4096*e4b17023SJohn Marino 	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4097*e4b17023SJohn Marino 	  if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4098*e4b17023SJohn Marino 	    inner_nz |= (GET_MODE_MASK (mode)
4099*e4b17023SJohn Marino 			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4100*e4b17023SJohn Marino 	}
4101*e4b17023SJohn Marino 
4102*e4b17023SJohn Marino       nonzero &= inner_nz;
4103*e4b17023SJohn Marino       break;
4104*e4b17023SJohn Marino 
4105*e4b17023SJohn Marino     case AND:
4106*e4b17023SJohn Marino       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4107*e4b17023SJohn Marino 				       known_x, known_mode, known_ret)
4108*e4b17023SJohn Marino       		 & cached_nonzero_bits (XEXP (x, 1), mode,
4109*e4b17023SJohn Marino 					known_x, known_mode, known_ret);
4110*e4b17023SJohn Marino       break;
4111*e4b17023SJohn Marino 
4112*e4b17023SJohn Marino     case XOR:   case IOR:
4113*e4b17023SJohn Marino     case UMIN:  case UMAX:  case SMIN:  case SMAX:
4114*e4b17023SJohn Marino       {
4115*e4b17023SJohn Marino 	unsigned HOST_WIDE_INT nonzero0
4116*e4b17023SJohn Marino 	   = cached_nonzero_bits (XEXP (x, 0), mode,
4117*e4b17023SJohn Marino 				  known_x, known_mode, known_ret);
4118*e4b17023SJohn Marino 
4119*e4b17023SJohn Marino 	/* Don't call nonzero_bits for the second time if it cannot change
4120*e4b17023SJohn Marino 	   anything.  */
4121*e4b17023SJohn Marino 	if ((nonzero & nonzero0) != nonzero)
4122*e4b17023SJohn Marino 	  nonzero &= nonzero0
4123*e4b17023SJohn Marino       		     | cached_nonzero_bits (XEXP (x, 1), mode,
4124*e4b17023SJohn Marino 					    known_x, known_mode, known_ret);
4125*e4b17023SJohn Marino       }
4126*e4b17023SJohn Marino       break;
4127*e4b17023SJohn Marino 
4128*e4b17023SJohn Marino     case PLUS:  case MINUS:
4129*e4b17023SJohn Marino     case MULT:
4130*e4b17023SJohn Marino     case DIV:   case UDIV:
4131*e4b17023SJohn Marino     case MOD:   case UMOD:
4132*e4b17023SJohn Marino       /* We can apply the rules of arithmetic to compute the number of
4133*e4b17023SJohn Marino 	 high- and low-order zero bits of these operations.  We start by
4134*e4b17023SJohn Marino 	 computing the width (position of the highest-order nonzero bit)
4135*e4b17023SJohn Marino 	 and the number of low-order zero bits for each value.  */
4136*e4b17023SJohn Marino       {
4137*e4b17023SJohn Marino 	unsigned HOST_WIDE_INT nz0
4138*e4b17023SJohn Marino 	  = cached_nonzero_bits (XEXP (x, 0), mode,
4139*e4b17023SJohn Marino 				 known_x, known_mode, known_ret);
4140*e4b17023SJohn Marino 	unsigned HOST_WIDE_INT nz1
4141*e4b17023SJohn Marino 	  = cached_nonzero_bits (XEXP (x, 1), mode,
4142*e4b17023SJohn Marino 				 known_x, known_mode, known_ret);
4143*e4b17023SJohn Marino 	int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
4144*e4b17023SJohn Marino 	int width0 = floor_log2 (nz0) + 1;
4145*e4b17023SJohn Marino 	int width1 = floor_log2 (nz1) + 1;
4146*e4b17023SJohn Marino 	int low0 = floor_log2 (nz0 & -nz0);
4147*e4b17023SJohn Marino 	int low1 = floor_log2 (nz1 & -nz1);
4148*e4b17023SJohn Marino 	unsigned HOST_WIDE_INT op0_maybe_minusp
4149*e4b17023SJohn Marino 	  = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4150*e4b17023SJohn Marino 	unsigned HOST_WIDE_INT op1_maybe_minusp
4151*e4b17023SJohn Marino 	  = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4152*e4b17023SJohn Marino 	unsigned int result_width = mode_width;
4153*e4b17023SJohn Marino 	int result_low = 0;
4154*e4b17023SJohn Marino 
4155*e4b17023SJohn Marino 	switch (code)
4156*e4b17023SJohn Marino 	  {
4157*e4b17023SJohn Marino 	  case PLUS:
4158*e4b17023SJohn Marino 	    result_width = MAX (width0, width1) + 1;
4159*e4b17023SJohn Marino 	    result_low = MIN (low0, low1);
4160*e4b17023SJohn Marino 	    break;
4161*e4b17023SJohn Marino 	  case MINUS:
4162*e4b17023SJohn Marino 	    result_low = MIN (low0, low1);
4163*e4b17023SJohn Marino 	    break;
4164*e4b17023SJohn Marino 	  case MULT:
4165*e4b17023SJohn Marino 	    result_width = width0 + width1;
4166*e4b17023SJohn Marino 	    result_low = low0 + low1;
4167*e4b17023SJohn Marino 	    break;
4168*e4b17023SJohn Marino 	  case DIV:
4169*e4b17023SJohn Marino 	    if (width1 == 0)
4170*e4b17023SJohn Marino 	      break;
4171*e4b17023SJohn Marino 	    if (!op0_maybe_minusp && !op1_maybe_minusp)
4172*e4b17023SJohn Marino 	      result_width = width0;
4173*e4b17023SJohn Marino 	    break;
4174*e4b17023SJohn Marino 	  case UDIV:
4175*e4b17023SJohn Marino 	    if (width1 == 0)
4176*e4b17023SJohn Marino 	      break;
4177*e4b17023SJohn Marino 	    result_width = width0;
4178*e4b17023SJohn Marino 	    break;
4179*e4b17023SJohn Marino 	  case MOD:
4180*e4b17023SJohn Marino 	    if (width1 == 0)
4181*e4b17023SJohn Marino 	      break;
4182*e4b17023SJohn Marino 	    if (!op0_maybe_minusp && !op1_maybe_minusp)
4183*e4b17023SJohn Marino 	      result_width = MIN (width0, width1);
4184*e4b17023SJohn Marino 	    result_low = MIN (low0, low1);
4185*e4b17023SJohn Marino 	    break;
4186*e4b17023SJohn Marino 	  case UMOD:
4187*e4b17023SJohn Marino 	    if (width1 == 0)
4188*e4b17023SJohn Marino 	      break;
4189*e4b17023SJohn Marino 	    result_width = MIN (width0, width1);
4190*e4b17023SJohn Marino 	    result_low = MIN (low0, low1);
4191*e4b17023SJohn Marino 	    break;
4192*e4b17023SJohn Marino 	  default:
4193*e4b17023SJohn Marino 	    gcc_unreachable ();
4194*e4b17023SJohn Marino 	  }
4195*e4b17023SJohn Marino 
4196*e4b17023SJohn Marino 	if (result_width < mode_width)
4197*e4b17023SJohn Marino 	  nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
4198*e4b17023SJohn Marino 
4199*e4b17023SJohn Marino 	if (result_low > 0)
4200*e4b17023SJohn Marino 	  nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
4201*e4b17023SJohn Marino       }
4202*e4b17023SJohn Marino       break;
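
      /* A hypothetical worked example (not from the original source):
	 with nz0 == 0xff and nz1 == 0xf0 we get width0 == width1 == 8,
	 low0 == 0 and low1 == 4.  PLUS then yields result_width == 9 and
	 result_low == 0, masking nonzero to 0x1ff; MULT yields
	 result_width == 16 and result_low == 4, masking nonzero to
	 0xfff0.  */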
4203*e4b17023SJohn Marino 
4204*e4b17023SJohn Marino     case ZERO_EXTRACT:
4205*e4b17023SJohn Marino       if (CONST_INT_P (XEXP (x, 1))
4206*e4b17023SJohn Marino 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4207*e4b17023SJohn Marino 	nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4208*e4b17023SJohn Marino       break;
4209*e4b17023SJohn Marino 
4210*e4b17023SJohn Marino     case SUBREG:
4211*e4b17023SJohn Marino       /* If this is a SUBREG formed for a promoted variable that has
4212*e4b17023SJohn Marino 	 been zero-extended, we know that at least the high-order bits
4213*e4b17023SJohn Marino 	 are zero, though others might be too.  */
4214*e4b17023SJohn Marino 
4215*e4b17023SJohn Marino       if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4216*e4b17023SJohn Marino 	nonzero = GET_MODE_MASK (GET_MODE (x))
4217*e4b17023SJohn Marino 		  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4218*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4219*e4b17023SJohn Marino 
4220*e4b17023SJohn Marino       inner_mode = GET_MODE (SUBREG_REG (x));
4221*e4b17023SJohn Marino       /* If the inner mode is a single word for both the host and target
4222*e4b17023SJohn Marino 	 machines, we can compute this from which bits of the inner
4223*e4b17023SJohn Marino 	 object might be nonzero.  */
4224*e4b17023SJohn Marino       if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4225*e4b17023SJohn Marino 	  && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
4226*e4b17023SJohn Marino 	{
4227*e4b17023SJohn Marino 	  nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4228*e4b17023SJohn Marino 					  known_x, known_mode, known_ret);
4229*e4b17023SJohn Marino 
4230*e4b17023SJohn Marino #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4231*e4b17023SJohn Marino 	  /* If this is a typical RISC machine, we only have to worry
4232*e4b17023SJohn Marino 	     about the way loads are extended.  */
4233*e4b17023SJohn Marino 	  if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4234*e4b17023SJohn Marino 	       ? val_signbit_known_set_p (inner_mode, nonzero)
4235*e4b17023SJohn Marino 	       : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
4236*e4b17023SJohn Marino 	      || !MEM_P (SUBREG_REG (x)))
4237*e4b17023SJohn Marino #endif
4238*e4b17023SJohn Marino 	    {
4239*e4b17023SJohn Marino 	      /* On many CISC machines, accessing an object in a wider mode
4240*e4b17023SJohn Marino 		 causes the high-order bits to become undefined.  So they are
4241*e4b17023SJohn Marino 		 not known to be zero.  */
4242*e4b17023SJohn Marino 	      if (GET_MODE_PRECISION (GET_MODE (x))
4243*e4b17023SJohn Marino 		  > GET_MODE_PRECISION (inner_mode))
4244*e4b17023SJohn Marino 		nonzero |= (GET_MODE_MASK (GET_MODE (x))
4245*e4b17023SJohn Marino 			    & ~GET_MODE_MASK (inner_mode));
4246*e4b17023SJohn Marino 	    }
4247*e4b17023SJohn Marino 	}
4248*e4b17023SJohn Marino       break;
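      /* Worked example (illustrative): for a paradoxical (subreg:SI (mem:QI ...))
	 on a WORD_REGISTER_OPERATIONS target whose LOAD_EXTEND_OP (QImode) is
	 ZERO_EXTEND, the load leaves bits 8..31 clear, so they stay clear in
	 NONZERO.  Without that guarantee, the code above ORs in the SImode bits
	 outside QImode, marking them as unknown.  */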
4249*e4b17023SJohn Marino 
4250*e4b17023SJohn Marino     case ASHIFTRT:
4251*e4b17023SJohn Marino     case LSHIFTRT:
4252*e4b17023SJohn Marino     case ASHIFT:
4253*e4b17023SJohn Marino     case ROTATE:
4254*e4b17023SJohn Marino       /* The nonzero bits are in two classes: any bits within MODE
4255*e4b17023SJohn Marino 	 that aren't in GET_MODE (x) are always significant.  The rest of the
4256*e4b17023SJohn Marino 	 nonzero bits are those that are significant in the operand of
4257*e4b17023SJohn Marino 	 the shift when shifted the appropriate number of bits.  This
4258*e4b17023SJohn Marino 	 shows that high-order bits are cleared by the right shift and
4259*e4b17023SJohn Marino 	 low-order bits by left shifts.  */
4260*e4b17023SJohn Marino       if (CONST_INT_P (XEXP (x, 1))
4261*e4b17023SJohn Marino 	  && INTVAL (XEXP (x, 1)) >= 0
4262*e4b17023SJohn Marino 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4263*e4b17023SJohn Marino 	  && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4264*e4b17023SJohn Marino 	{
4265*e4b17023SJohn Marino 	  enum machine_mode inner_mode = GET_MODE (x);
4266*e4b17023SJohn Marino 	  unsigned int width = GET_MODE_PRECISION (inner_mode);
4267*e4b17023SJohn Marino 	  int count = INTVAL (XEXP (x, 1));
4268*e4b17023SJohn Marino 	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4269*e4b17023SJohn Marino 	  unsigned HOST_WIDE_INT op_nonzero
4270*e4b17023SJohn Marino 	    = cached_nonzero_bits (XEXP (x, 0), mode,
4271*e4b17023SJohn Marino 				   known_x, known_mode, known_ret);
4272*e4b17023SJohn Marino 	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4273*e4b17023SJohn Marino 	  unsigned HOST_WIDE_INT outer = 0;
4274*e4b17023SJohn Marino 
4275*e4b17023SJohn Marino 	  if (mode_width > width)
4276*e4b17023SJohn Marino 	    outer = (op_nonzero & nonzero & ~mode_mask);
4277*e4b17023SJohn Marino 
4278*e4b17023SJohn Marino 	  if (code == LSHIFTRT)
4279*e4b17023SJohn Marino 	    inner >>= count;
4280*e4b17023SJohn Marino 	  else if (code == ASHIFTRT)
4281*e4b17023SJohn Marino 	    {
4282*e4b17023SJohn Marino 	      inner >>= count;
4283*e4b17023SJohn Marino 
4284*e4b17023SJohn Marino 	      /* If the sign bit may have been nonzero before the shift, we
4285*e4b17023SJohn Marino 		 need to mark all the places it could have been copied to
4286*e4b17023SJohn Marino 		 by the shift as possibly nonzero.  */
4287*e4b17023SJohn Marino 	      if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4288*e4b17023SJohn Marino 		inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4289*e4b17023SJohn Marino 			   << (width - count);
4290*e4b17023SJohn Marino 	    }
4291*e4b17023SJohn Marino 	  else if (code == ASHIFT)
4292*e4b17023SJohn Marino 	    inner <<= count;
4293*e4b17023SJohn Marino 	  else
4294*e4b17023SJohn Marino 	    inner = ((inner << (count % width)
4295*e4b17023SJohn Marino 		      | (inner >> (width - (count % width)))) & mode_mask);
4296*e4b17023SJohn Marino 
4297*e4b17023SJohn Marino 	  nonzero &= (outer | inner);
4298*e4b17023SJohn Marino 	}
4299*e4b17023SJohn Marino       break;
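      /* Worked example (illustrative), in an 8-bit mode: if the operand's
	 nonzero bits are 0x90 and the shift is (ashiftrt ... (const_int 2)),
	 INNER becomes 0x90 >> 2 = 0x24; since bit 5 (the shifted-down sign bit)
	 may be set, bits 6 and 7 are marked as well, giving 0xe4.  The same
	 operand under (lshiftrt ... (const_int 2)) gives just 0x24, and under
	 (ashift ... (const_int 2)) only 0x40 remains within the mode.  */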
4300*e4b17023SJohn Marino 
4301*e4b17023SJohn Marino     case FFS:
4302*e4b17023SJohn Marino     case POPCOUNT:
4303*e4b17023SJohn Marino       /* This is at most the number of bits in the mode.  */
4304*e4b17023SJohn Marino       nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4305*e4b17023SJohn Marino       break;
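      /* For example, in a 32-bit mode the result of FFS or POPCOUNT is at most
	 32, so only the low six bits can be set:
	 (2 << floor_log2 (32)) - 1 == 0x3f.  */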
4306*e4b17023SJohn Marino 
4307*e4b17023SJohn Marino     case CLZ:
4308*e4b17023SJohn Marino       /* If CLZ has a known value at zero, then the nonzero bits are
4309*e4b17023SJohn Marino 	 that value, plus the number of bits in the mode minus one.  */
4310*e4b17023SJohn Marino       if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4311*e4b17023SJohn Marino 	nonzero
4312*e4b17023SJohn Marino 	  |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4313*e4b17023SJohn Marino       else
4314*e4b17023SJohn Marino 	nonzero = -1;
4315*e4b17023SJohn Marino       break;
4316*e4b17023SJohn Marino 
4317*e4b17023SJohn Marino     case CTZ:
4318*e4b17023SJohn Marino       /* If CTZ has a known value at zero, then the nonzero bits are
4319*e4b17023SJohn Marino 	 that value, plus the number of bits in the mode minus one.  */
4320*e4b17023SJohn Marino       if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4321*e4b17023SJohn Marino 	nonzero
4322*e4b17023SJohn Marino 	  |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4323*e4b17023SJohn Marino       else
4324*e4b17023SJohn Marino 	nonzero = -1;
4325*e4b17023SJohn Marino       break;
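      /* For example, in a 32-bit mode where the defined-at-zero value is 32
	 (0x20), the two cases above compute 0x20 | 0x1f == 0x3f, reflecting
	 that the result never exceeds the number of bits in the mode.  */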
4326*e4b17023SJohn Marino 
4327*e4b17023SJohn Marino     case CLRSB:
4328*e4b17023SJohn Marino       /* This is at most the number of bits in the mode minus 1.  */
4329*e4b17023SJohn Marino       nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4330*e4b17023SJohn Marino       break;
4331*e4b17023SJohn Marino 
4332*e4b17023SJohn Marino     case PARITY:
4333*e4b17023SJohn Marino       nonzero = 1;
4334*e4b17023SJohn Marino       break;
4335*e4b17023SJohn Marino 
4336*e4b17023SJohn Marino     case IF_THEN_ELSE:
4337*e4b17023SJohn Marino       {
4338*e4b17023SJohn Marino 	unsigned HOST_WIDE_INT nonzero_true
4339*e4b17023SJohn Marino 	  = cached_nonzero_bits (XEXP (x, 1), mode,
4340*e4b17023SJohn Marino 				 known_x, known_mode, known_ret);
4341*e4b17023SJohn Marino 
4342*e4b17023SJohn Marino 	/* Don't call nonzero_bits for the second time if it cannot change
4343*e4b17023SJohn Marino 	   anything.  */
4344*e4b17023SJohn Marino 	if ((nonzero & nonzero_true) != nonzero)
4345*e4b17023SJohn Marino 	  nonzero &= nonzero_true
4346*e4b17023SJohn Marino 		     | cached_nonzero_bits (XEXP (x, 2), mode,
4347*e4b17023SJohn Marino 					    known_x, known_mode, known_ret);
4348*e4b17023SJohn Marino       }
4349*e4b17023SJohn Marino       break;
4350*e4b17023SJohn Marino 
4351*e4b17023SJohn Marino     default:
4352*e4b17023SJohn Marino       break;
4353*e4b17023SJohn Marino     }
4354*e4b17023SJohn Marino 
4355*e4b17023SJohn Marino   return nonzero;
4356*e4b17023SJohn Marino }
4357*e4b17023SJohn Marino 
4358*e4b17023SJohn Marino /* See the macro definition above.  */
4359*e4b17023SJohn Marino #undef cached_num_sign_bit_copies
4360*e4b17023SJohn Marino 
4361*e4b17023SJohn Marino 
4362*e4b17023SJohn Marino /* The function cached_num_sign_bit_copies is a wrapper around
4363*e4b17023SJohn Marino    num_sign_bit_copies1.  It avoids exponential behavior in
4364*e4b17023SJohn Marino    num_sign_bit_copies1 when X has identical subexpressions on the
4365*e4b17023SJohn Marino    first or the second level.  */
4366*e4b17023SJohn Marino 
4367*e4b17023SJohn Marino static unsigned int
4368*e4b17023SJohn Marino cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4369*e4b17023SJohn Marino 			    enum machine_mode known_mode,
4370*e4b17023SJohn Marino 			    unsigned int known_ret)
4371*e4b17023SJohn Marino {
4372*e4b17023SJohn Marino   if (x == known_x && mode == known_mode)
4373*e4b17023SJohn Marino     return known_ret;
4374*e4b17023SJohn Marino 
4375*e4b17023SJohn Marino   /* Try to find identical subexpressions.  If found call
4376*e4b17023SJohn Marino      num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4377*e4b17023SJohn Marino      the precomputed value for the subexpression as KNOWN_RET.  */
4378*e4b17023SJohn Marino 
4379*e4b17023SJohn Marino   if (ARITHMETIC_P (x))
4380*e4b17023SJohn Marino     {
4381*e4b17023SJohn Marino       rtx x0 = XEXP (x, 0);
4382*e4b17023SJohn Marino       rtx x1 = XEXP (x, 1);
4383*e4b17023SJohn Marino 
4384*e4b17023SJohn Marino       /* Check the first level.  */
4385*e4b17023SJohn Marino       if (x0 == x1)
4386*e4b17023SJohn Marino 	return
4387*e4b17023SJohn Marino 	  num_sign_bit_copies1 (x, mode, x0, mode,
4388*e4b17023SJohn Marino 				cached_num_sign_bit_copies (x0, mode, known_x,
4389*e4b17023SJohn Marino 							    known_mode,
4390*e4b17023SJohn Marino 							    known_ret));
4391*e4b17023SJohn Marino 
4392*e4b17023SJohn Marino       /* Check the second level.  */
4393*e4b17023SJohn Marino       if (ARITHMETIC_P (x0)
4394*e4b17023SJohn Marino 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4395*e4b17023SJohn Marino 	return
4396*e4b17023SJohn Marino 	  num_sign_bit_copies1 (x, mode, x1, mode,
4397*e4b17023SJohn Marino 				cached_num_sign_bit_copies (x1, mode, known_x,
4398*e4b17023SJohn Marino 							    known_mode,
4399*e4b17023SJohn Marino 							    known_ret));
4400*e4b17023SJohn Marino 
4401*e4b17023SJohn Marino       if (ARITHMETIC_P (x1)
4402*e4b17023SJohn Marino 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4403*e4b17023SJohn Marino 	return
4404*e4b17023SJohn Marino 	  num_sign_bit_copies1 (x, mode, x0, mode,
4405*e4b17023SJohn Marino 				cached_num_sign_bit_copies (x0, mode, known_x,
4406*e4b17023SJohn Marino 							    known_mode,
4407*e4b17023SJohn Marino 							    known_ret));
4408*e4b17023SJohn Marino     }
4409*e4b17023SJohn Marino 
4410*e4b17023SJohn Marino   return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4411*e4b17023SJohn Marino }
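/* For example, given (plus:SI (reg:SI 100) (reg:SI 100)), the first-level
   check above sees that both operands are the same rtx, computes the value
   for (reg:SI 100) once, and passes it down as KNOWN_X/KNOWN_RET so the
   recursive walk over X does not evaluate the shared operand twice.  */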
4412*e4b17023SJohn Marino 
4413*e4b17023SJohn Marino /* Return the number of bits at the high-order end of X that are known to
4414*e4b17023SJohn Marino    be equal to the sign bit.  X will be used in mode MODE; if MODE is
4415*e4b17023SJohn Marino    VOIDmode, X will be used in its own mode.  The returned value will always
4416*e4b17023SJohn Marino    be between 1 and the number of bits in MODE.  */
4417*e4b17023SJohn Marino 
4418*e4b17023SJohn Marino static unsigned int
4419*e4b17023SJohn Marino num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4420*e4b17023SJohn Marino 		      enum machine_mode known_mode,
4421*e4b17023SJohn Marino 		      unsigned int known_ret)
4422*e4b17023SJohn Marino {
4423*e4b17023SJohn Marino   enum rtx_code code = GET_CODE (x);
4424*e4b17023SJohn Marino   unsigned int bitwidth = GET_MODE_PRECISION (mode);
4425*e4b17023SJohn Marino   int num0, num1, result;
4426*e4b17023SJohn Marino   unsigned HOST_WIDE_INT nonzero;
4427*e4b17023SJohn Marino 
4428*e4b17023SJohn Marino   /* If we weren't given a mode, use the mode of X.  If the mode is still
4429*e4b17023SJohn Marino      VOIDmode, we don't know anything.  Likewise if one of the modes is
4430*e4b17023SJohn Marino      floating-point.  */
4431*e4b17023SJohn Marino 
4432*e4b17023SJohn Marino   if (mode == VOIDmode)
4433*e4b17023SJohn Marino     mode = GET_MODE (x);
4434*e4b17023SJohn Marino 
4435*e4b17023SJohn Marino   if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4436*e4b17023SJohn Marino       || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4437*e4b17023SJohn Marino     return 1;
4438*e4b17023SJohn Marino 
4439*e4b17023SJohn Marino   /* For a smaller object, just ignore the high bits.  */
4440*e4b17023SJohn Marino   if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
4441*e4b17023SJohn Marino     {
4442*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4443*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4444*e4b17023SJohn Marino       return MAX (1,
4445*e4b17023SJohn Marino 		  num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
4446*e4b17023SJohn Marino     }
4447*e4b17023SJohn Marino 
4448*e4b17023SJohn Marino   if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
4449*e4b17023SJohn Marino     {
4450*e4b17023SJohn Marino #ifndef WORD_REGISTER_OPERATIONS
4451*e4b17023SJohn Marino       /* If this machine does not do all register operations on the entire
4452*e4b17023SJohn Marino 	 register and MODE is wider than the mode of X, we can say nothing
4453*e4b17023SJohn Marino 	 at all about the high-order bits.  */
4454*e4b17023SJohn Marino       return 1;
4455*e4b17023SJohn Marino #else
4456*e4b17023SJohn Marino       /* Likewise on machines that do, if the mode of the object is smaller
4457*e4b17023SJohn Marino 	 than a word and loads of that size don't sign extend, we can say
4458*e4b17023SJohn Marino 	 nothing about the high order bits.  */
4459*e4b17023SJohn Marino       if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
4460*e4b17023SJohn Marino #ifdef LOAD_EXTEND_OP
4461*e4b17023SJohn Marino 	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4462*e4b17023SJohn Marino #endif
4463*e4b17023SJohn Marino 	  )
4464*e4b17023SJohn Marino 	return 1;
4465*e4b17023SJohn Marino #endif
4466*e4b17023SJohn Marino     }
4467*e4b17023SJohn Marino 
4468*e4b17023SJohn Marino   switch (code)
4469*e4b17023SJohn Marino     {
4470*e4b17023SJohn Marino     case REG:
4471*e4b17023SJohn Marino 
4472*e4b17023SJohn Marino #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4473*e4b17023SJohn Marino       /* If pointers extend signed and this is a pointer in Pmode, say that
4474*e4b17023SJohn Marino 	 all the bits above ptr_mode are known to be sign bit copies.  */
4475*e4b17023SJohn Marino       /* As we do not know which address space the pointer is referring to,
4476*e4b17023SJohn Marino 	 we can do this only if the target does not support different pointer
4477*e4b17023SJohn Marino 	 or address modes depending on the address space.  */
4478*e4b17023SJohn Marino       if (target_default_pointer_address_modes_p ()
4479*e4b17023SJohn Marino 	  && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4480*e4b17023SJohn Marino 	  && mode == Pmode && REG_POINTER (x))
4481*e4b17023SJohn Marino 	return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
4482*e4b17023SJohn Marino #endif
4483*e4b17023SJohn Marino 
4484*e4b17023SJohn Marino       {
4485*e4b17023SJohn Marino 	unsigned int copies_for_hook = 1, copies = 1;
4486*e4b17023SJohn Marino 	rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4487*e4b17023SJohn Marino 						     known_mode, known_ret,
4488*e4b17023SJohn Marino 						     &copies_for_hook);
4489*e4b17023SJohn Marino 
4490*e4b17023SJohn Marino 	if (new_rtx)
4491*e4b17023SJohn Marino 	  copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4492*e4b17023SJohn Marino 					       known_mode, known_ret);
4493*e4b17023SJohn Marino 
4494*e4b17023SJohn Marino 	if (copies > 1 || copies_for_hook > 1)
4495*e4b17023SJohn Marino 	  return MAX (copies, copies_for_hook);
4496*e4b17023SJohn Marino 
4497*e4b17023SJohn Marino 	/* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
4498*e4b17023SJohn Marino       }
4499*e4b17023SJohn Marino       break;
4500*e4b17023SJohn Marino 
4501*e4b17023SJohn Marino     case MEM:
4502*e4b17023SJohn Marino #ifdef LOAD_EXTEND_OP
4503*e4b17023SJohn Marino       /* Some RISC machines sign-extend all loads of smaller than a word.  */
4504*e4b17023SJohn Marino       if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4505*e4b17023SJohn Marino 	return MAX (1, ((int) bitwidth
4506*e4b17023SJohn Marino 			- (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
4507*e4b17023SJohn Marino #endif
4508*e4b17023SJohn Marino       break;
4509*e4b17023SJohn Marino 
4510*e4b17023SJohn Marino     case CONST_INT:
4511*e4b17023SJohn Marino       /* If the constant is negative, take its 1's complement and remask.
4512*e4b17023SJohn Marino 	 Then see how many zero bits we have.  */
4513*e4b17023SJohn Marino       nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
4514*e4b17023SJohn Marino       if (bitwidth <= HOST_BITS_PER_WIDE_INT
4515*e4b17023SJohn Marino 	  && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4516*e4b17023SJohn Marino 	nonzero = (~nonzero) & GET_MODE_MASK (mode);
4517*e4b17023SJohn Marino 
4518*e4b17023SJohn Marino       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
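      /* For example, (const_int -4) in a 32-bit mode is 0xfffffffc; its
	 complement under the mode mask is 0x3, whose floor_log2 is 1, so
	 32 - 1 - 1 = 30 high-order bits are copies of the sign bit.  */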
4519*e4b17023SJohn Marino 
4520*e4b17023SJohn Marino     case SUBREG:
4521*e4b17023SJohn Marino       /* If this is a SUBREG for a promoted object that is sign-extended
4522*e4b17023SJohn Marino 	 and we are looking at it in a wider mode, we know that at least the
4523*e4b17023SJohn Marino 	 high-order bits are known to be sign bit copies.  */
4524*e4b17023SJohn Marino 
4525*e4b17023SJohn Marino       if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4526*e4b17023SJohn Marino 	{
4527*e4b17023SJohn Marino 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4528*e4b17023SJohn Marino 					     known_x, known_mode, known_ret);
4529*e4b17023SJohn Marino 	  return MAX ((int) bitwidth
4530*e4b17023SJohn Marino 		      - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
4531*e4b17023SJohn Marino 		      num0);
4532*e4b17023SJohn Marino 	}
4533*e4b17023SJohn Marino 
4534*e4b17023SJohn Marino       /* For a smaller object, just ignore the high bits.  */
4535*e4b17023SJohn Marino       if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
4536*e4b17023SJohn Marino 	{
4537*e4b17023SJohn Marino 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4538*e4b17023SJohn Marino 					     known_x, known_mode, known_ret);
4539*e4b17023SJohn Marino 	  return MAX (1, (num0
4540*e4b17023SJohn Marino 			  - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
4541*e4b17023SJohn Marino 				   - bitwidth)));
4542*e4b17023SJohn Marino 	}
4543*e4b17023SJohn Marino 
4544*e4b17023SJohn Marino #ifdef WORD_REGISTER_OPERATIONS
4545*e4b17023SJohn Marino #ifdef LOAD_EXTEND_OP
4546*e4b17023SJohn Marino       /* For paradoxical SUBREGs on machines where all register operations
4547*e4b17023SJohn Marino 	 affect the entire register, just look inside.  Note that we are
4548*e4b17023SJohn Marino 	 passing MODE to the recursive call, so the number of sign bit copies
4549*e4b17023SJohn Marino 	 will remain relative to that mode, not the inner mode.  */
4550*e4b17023SJohn Marino 
4551*e4b17023SJohn Marino       /* This works only if loads sign extend.  Otherwise, if we get a
4552*e4b17023SJohn Marino 	 reload for the inner part, it may be loaded from the stack, and
4553*e4b17023SJohn Marino 	 then we lose all sign bit copies that existed before the store
4554*e4b17023SJohn Marino 	 to the stack.  */
4555*e4b17023SJohn Marino 
4556*e4b17023SJohn Marino       if (paradoxical_subreg_p (x)
4557*e4b17023SJohn Marino 	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4558*e4b17023SJohn Marino 	  && MEM_P (SUBREG_REG (x)))
4559*e4b17023SJohn Marino 	return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4560*e4b17023SJohn Marino 					   known_x, known_mode, known_ret);
4561*e4b17023SJohn Marino #endif
4562*e4b17023SJohn Marino #endif
4563*e4b17023SJohn Marino       break;
4564*e4b17023SJohn Marino 
4565*e4b17023SJohn Marino     case SIGN_EXTRACT:
4566*e4b17023SJohn Marino       if (CONST_INT_P (XEXP (x, 1)))
4567*e4b17023SJohn Marino 	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4568*e4b17023SJohn Marino       break;
4569*e4b17023SJohn Marino 
4570*e4b17023SJohn Marino     case SIGN_EXTEND:
4571*e4b17023SJohn Marino       return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4572*e4b17023SJohn Marino 	      + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4573*e4b17023SJohn Marino 					    known_x, known_mode, known_ret));
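      /* For example, (sign_extend:SI (reg:QI ...)) used in SImode contributes
	 32 - 8 = 24 guaranteed copies on top of whatever the QImode operand
	 already provides.  */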
4574*e4b17023SJohn Marino 
4575*e4b17023SJohn Marino     case TRUNCATE:
4576*e4b17023SJohn Marino       /* For a smaller object, just ignore the high bits.  */
4577*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4578*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4579*e4b17023SJohn Marino       return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4580*e4b17023SJohn Marino 				    - bitwidth)));
4581*e4b17023SJohn Marino 
4582*e4b17023SJohn Marino     case NOT:
4583*e4b17023SJohn Marino       return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4584*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4585*e4b17023SJohn Marino 
4586*e4b17023SJohn Marino     case ROTATE:       case ROTATERT:
4587*e4b17023SJohn Marino       /* If we are rotating left by a number of bits less than the number
4588*e4b17023SJohn Marino 	 of sign bit copies, we can just subtract that amount from the
4589*e4b17023SJohn Marino 	 number.  */
4590*e4b17023SJohn Marino       if (CONST_INT_P (XEXP (x, 1))
4591*e4b17023SJohn Marino 	  && INTVAL (XEXP (x, 1)) >= 0
4592*e4b17023SJohn Marino 	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4593*e4b17023SJohn Marino 	{
4594*e4b17023SJohn Marino 	  num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4595*e4b17023SJohn Marino 					     known_x, known_mode, known_ret);
4596*e4b17023SJohn Marino 	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4597*e4b17023SJohn Marino 				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4598*e4b17023SJohn Marino 	}
4599*e4b17023SJohn Marino       break;
4600*e4b17023SJohn Marino 
4601*e4b17023SJohn Marino     case NEG:
4602*e4b17023SJohn Marino       /* In general, this subtracts one sign bit copy.  But if the value
4603*e4b17023SJohn Marino 	 is known to be positive, the number of sign bit copies is the
4604*e4b17023SJohn Marino 	 same as that of the input.  Finally, if the input has just one bit
4605*e4b17023SJohn Marino 	 that might be nonzero, all the bits are copies of the sign bit.  */
4606*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4607*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4608*e4b17023SJohn Marino       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4609*e4b17023SJohn Marino 	return num0 > 1 ? num0 - 1 : 1;
4610*e4b17023SJohn Marino 
4611*e4b17023SJohn Marino       nonzero = nonzero_bits (XEXP (x, 0), mode);
4612*e4b17023SJohn Marino       if (nonzero == 1)
4613*e4b17023SJohn Marino 	return bitwidth;
4614*e4b17023SJohn Marino 
4615*e4b17023SJohn Marino       if (num0 > 1
4616*e4b17023SJohn Marino 	  && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4617*e4b17023SJohn Marino 	num0--;
4618*e4b17023SJohn Marino 
4619*e4b17023SJohn Marino       return num0;
4620*e4b17023SJohn Marino 
4621*e4b17023SJohn Marino     case IOR:   case AND:   case XOR:
4622*e4b17023SJohn Marino     case SMIN:  case SMAX:  case UMIN:  case UMAX:
4623*e4b17023SJohn Marino       /* Logical operations will preserve the number of sign-bit copies.
4624*e4b17023SJohn Marino 	 MIN and MAX operations always return one of the operands.  */
4625*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4626*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4627*e4b17023SJohn Marino       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4628*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4629*e4b17023SJohn Marino 
4630*e4b17023SJohn Marino       /* If num1 is clearing some of the top bits then regardless of
4631*e4b17023SJohn Marino 	 the other term, we are guaranteed to have at least that many
4632*e4b17023SJohn Marino 	 high-order zero bits.  */
4633*e4b17023SJohn Marino       if (code == AND
4634*e4b17023SJohn Marino 	  && num1 > 1
4635*e4b17023SJohn Marino 	  && bitwidth <= HOST_BITS_PER_WIDE_INT
4636*e4b17023SJohn Marino 	  && CONST_INT_P (XEXP (x, 1))
4637*e4b17023SJohn Marino 	  && (UINTVAL (XEXP (x, 1))
4638*e4b17023SJohn Marino 	      & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
4639*e4b17023SJohn Marino 	return num1;
4640*e4b17023SJohn Marino 
4641*e4b17023SJohn Marino       /* Similarly for IOR when setting high-order bits.  */
4642*e4b17023SJohn Marino       if (code == IOR
4643*e4b17023SJohn Marino 	  && num1 > 1
4644*e4b17023SJohn Marino 	  && bitwidth <= HOST_BITS_PER_WIDE_INT
4645*e4b17023SJohn Marino 	  && CONST_INT_P (XEXP (x, 1))
4646*e4b17023SJohn Marino 	  && (UINTVAL (XEXP (x, 1))
4647*e4b17023SJohn Marino 	      & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4648*e4b17023SJohn Marino 	return num1;
4649*e4b17023SJohn Marino 
4650*e4b17023SJohn Marino       return MIN (num0, num1);
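      /* For example, (and:SI x (const_int 0x7f)) clears bits 7..31; the
	 constant itself has 32 - floor_log2 (0x7f) - 1 = 25 sign-bit copies,
	 so the result is known to have at least 25 no matter what X is.  */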
4651*e4b17023SJohn Marino 
4652*e4b17023SJohn Marino     case PLUS:  case MINUS:
4653*e4b17023SJohn Marino       /* For addition and subtraction, we can have a 1-bit carry.  However,
4654*e4b17023SJohn Marino 	 if we are subtracting 1 from a positive number, there will not
4655*e4b17023SJohn Marino 	 be such a carry.  Furthermore, if the positive number is known to
4656*e4b17023SJohn Marino 	 be 0 or 1, we know the result is either -1 or 0.  */
4657*e4b17023SJohn Marino 
4658*e4b17023SJohn Marino       if (code == PLUS && XEXP (x, 1) == constm1_rtx
4659*e4b17023SJohn Marino 	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
4660*e4b17023SJohn Marino 	{
4661*e4b17023SJohn Marino 	  nonzero = nonzero_bits (XEXP (x, 0), mode);
4662*e4b17023SJohn Marino 	  if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4663*e4b17023SJohn Marino 	    return (nonzero == 1 || nonzero == 0 ? bitwidth
4664*e4b17023SJohn Marino 		    : bitwidth - floor_log2 (nonzero) - 1);
4665*e4b17023SJohn Marino 	}
4666*e4b17023SJohn Marino 
4667*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4668*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4669*e4b17023SJohn Marino       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4670*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4671*e4b17023SJohn Marino       result = MAX (1, MIN (num0, num1) - 1);
4672*e4b17023SJohn Marino 
4673*e4b17023SJohn Marino       return result;
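      /* For example, if X is (plus:SI y (const_int -1)) and Y is known to be
	 0 or 1 (its nonzero bits are 1), the result is 0 or -1, so every bit
	 copies the sign bit and the full bitwidth is returned.  */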
4674*e4b17023SJohn Marino 
4675*e4b17023SJohn Marino     case MULT:
4676*e4b17023SJohn Marino       /* The number of bits of the product is the sum of the number of
4677*e4b17023SJohn Marino 	 bits of both terms.  However, unless one of the terms is known
4678*e4b17023SJohn Marino 	 to be positive, we must allow for an additional bit since negating
4679*e4b17023SJohn Marino 	 a negative number can remove one sign bit copy.  */
4680*e4b17023SJohn Marino 
4681*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4682*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4683*e4b17023SJohn Marino       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4684*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4685*e4b17023SJohn Marino 
4686*e4b17023SJohn Marino       result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4687*e4b17023SJohn Marino       if (result > 0
4688*e4b17023SJohn Marino 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4689*e4b17023SJohn Marino 	      || (((nonzero_bits (XEXP (x, 0), mode)
4690*e4b17023SJohn Marino 		    & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4691*e4b17023SJohn Marino 		  && ((nonzero_bits (XEXP (x, 1), mode)
4692*e4b17023SJohn Marino 		       & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
4693*e4b17023SJohn Marino 		      != 0))))
4694*e4b17023SJohn Marino 	result--;
4695*e4b17023SJohn Marino 
4696*e4b17023SJohn Marino       return MAX (1, result);
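      /* Worked example: in a 32-bit mode with NUM0 == 20 and NUM1 == 18 the
	 operands need 12 and 14 significant bits, so the product keeps
	 32 - 12 - 14 = 6 sign-bit copies; if both operands might be negative,
	 allowing for the extra bit drops this to 5.  */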
4697*e4b17023SJohn Marino 
4698*e4b17023SJohn Marino     case UDIV:
4699*e4b17023SJohn Marino       /* The result must be <= the first operand.  If the first operand
4700*e4b17023SJohn Marino 	 has the high bit set, we know nothing about the number of sign
4701*e4b17023SJohn Marino 	 bit copies.  */
4702*e4b17023SJohn Marino       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4703*e4b17023SJohn Marino 	return 1;
4704*e4b17023SJohn Marino       else if ((nonzero_bits (XEXP (x, 0), mode)
4705*e4b17023SJohn Marino 		& ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4706*e4b17023SJohn Marino 	return 1;
4707*e4b17023SJohn Marino       else
4708*e4b17023SJohn Marino 	return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4709*e4b17023SJohn Marino 					   known_x, known_mode, known_ret);
4710*e4b17023SJohn Marino 
4711*e4b17023SJohn Marino     case UMOD:
4712*e4b17023SJohn Marino       /* The result must be <= the second operand.  If the second operand
4713*e4b17023SJohn Marino 	 has (or just might have) the high bit set, we know nothing about
4714*e4b17023SJohn Marino 	 the number of sign bit copies.  */
4715*e4b17023SJohn Marino       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4716*e4b17023SJohn Marino 	return 1;
4717*e4b17023SJohn Marino       else if ((nonzero_bits (XEXP (x, 1), mode)
4718*e4b17023SJohn Marino 		& ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4719*e4b17023SJohn Marino 	return 1;
4720*e4b17023SJohn Marino       else
4721*e4b17023SJohn Marino 	return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4722*e4b17023SJohn Marino 					   known_x, known_mode, known_ret);
4723*e4b17023SJohn Marino 
4724*e4b17023SJohn Marino     case DIV:
4725*e4b17023SJohn Marino       /* Similar to unsigned division, except that we have to worry about
4726*e4b17023SJohn Marino 	 the case where the divisor is negative, in which case we have
4727*e4b17023SJohn Marino 	 to add 1.  */
4728*e4b17023SJohn Marino       result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4729*e4b17023SJohn Marino 					   known_x, known_mode, known_ret);
4730*e4b17023SJohn Marino       if (result > 1
4731*e4b17023SJohn Marino 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4732*e4b17023SJohn Marino 	      || (nonzero_bits (XEXP (x, 1), mode)
4733*e4b17023SJohn Marino 		  & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4734*e4b17023SJohn Marino 	result--;
4735*e4b17023SJohn Marino 
4736*e4b17023SJohn Marino       return result;
4737*e4b17023SJohn Marino 
4738*e4b17023SJohn Marino     case MOD:
4739*e4b17023SJohn Marino       result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4740*e4b17023SJohn Marino 					   known_x, known_mode, known_ret);
4741*e4b17023SJohn Marino       if (result > 1
4742*e4b17023SJohn Marino 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4743*e4b17023SJohn Marino 	      || (nonzero_bits (XEXP (x, 1), mode)
4744*e4b17023SJohn Marino 		  & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4745*e4b17023SJohn Marino 	result--;
4746*e4b17023SJohn Marino 
4747*e4b17023SJohn Marino       return result;
4748*e4b17023SJohn Marino 
4749*e4b17023SJohn Marino     case ASHIFTRT:
4750*e4b17023SJohn Marino       /* Shifts by a constant add to the number of bits equal to the
4751*e4b17023SJohn Marino 	 sign bit.  */
4752*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4753*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4754*e4b17023SJohn Marino       if (CONST_INT_P (XEXP (x, 1))
4755*e4b17023SJohn Marino 	  && INTVAL (XEXP (x, 1)) > 0
4756*e4b17023SJohn Marino 	  && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4757*e4b17023SJohn Marino 	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4758*e4b17023SJohn Marino 
4759*e4b17023SJohn Marino       return num0;
4760*e4b17023SJohn Marino 
4761*e4b17023SJohn Marino     case ASHIFT:
4762*e4b17023SJohn Marino       /* Left shifts destroy copies.  */
4763*e4b17023SJohn Marino       if (!CONST_INT_P (XEXP (x, 1))
4764*e4b17023SJohn Marino 	  || INTVAL (XEXP (x, 1)) < 0
4765*e4b17023SJohn Marino 	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4766*e4b17023SJohn Marino 	  || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
4767*e4b17023SJohn Marino 	return 1;
4768*e4b17023SJohn Marino 
4769*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4770*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4771*e4b17023SJohn Marino       return MAX (1, num0 - INTVAL (XEXP (x, 1)));
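      /* For example, (ashiftrt:SI x (const_int 3)) adds three sign-bit copies
	 (capped at the bitwidth), while (ashift:SI x (const_int 3)) removes
	 three, never going below one.  */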
4772*e4b17023SJohn Marino 
4773*e4b17023SJohn Marino     case IF_THEN_ELSE:
4774*e4b17023SJohn Marino       num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4775*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4776*e4b17023SJohn Marino       num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4777*e4b17023SJohn Marino 					 known_x, known_mode, known_ret);
4778*e4b17023SJohn Marino       return MIN (num0, num1);
4779*e4b17023SJohn Marino 
4780*e4b17023SJohn Marino     case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
4781*e4b17023SJohn Marino     case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
4782*e4b17023SJohn Marino     case GEU: case GTU: case LEU: case LTU:
4783*e4b17023SJohn Marino     case UNORDERED: case ORDERED:
4784*e4b17023SJohn Marino       /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
4785*e4b17023SJohn Marino 	 Then see how many zero bits we have.  */
4786*e4b17023SJohn Marino       nonzero = STORE_FLAG_VALUE;
4787*e4b17023SJohn Marino       if (bitwidth <= HOST_BITS_PER_WIDE_INT
4788*e4b17023SJohn Marino 	  && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4789*e4b17023SJohn Marino 	nonzero = (~nonzero) & GET_MODE_MASK (mode);
4790*e4b17023SJohn Marino 
4791*e4b17023SJohn Marino       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
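      /* With the common STORE_FLAG_VALUE of 1 this yields bitwidth - 1:
	 a comparison result of 0 or 1 has every bit except the lowest equal
	 to the sign bit.  */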
4792*e4b17023SJohn Marino 
4793*e4b17023SJohn Marino     default:
4794*e4b17023SJohn Marino       break;
4795*e4b17023SJohn Marino     }
4796*e4b17023SJohn Marino 
4797*e4b17023SJohn Marino   /* If we haven't been able to figure it out by one of the above rules,
4798*e4b17023SJohn Marino      see if some of the high-order bits are known to be zero.  If so,
4799*e4b17023SJohn Marino      count those bits and return one less than that amount.  If we can't
4800*e4b17023SJohn Marino      safely compute the mask for this mode, always return BITWIDTH.  */
4801*e4b17023SJohn Marino 
4802*e4b17023SJohn Marino   bitwidth = GET_MODE_PRECISION (mode);
4803*e4b17023SJohn Marino   if (bitwidth > HOST_BITS_PER_WIDE_INT)
4804*e4b17023SJohn Marino     return 1;
4805*e4b17023SJohn Marino 
4806*e4b17023SJohn Marino   nonzero = nonzero_bits (x, mode);
4807*e4b17023SJohn Marino   return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
4808*e4b17023SJohn Marino 	 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4809*e4b17023SJohn Marino }
4810*e4b17023SJohn Marino 
4811*e4b17023SJohn Marino /* Calculate the rtx_cost of a single instruction.  A return value of
4812*e4b17023SJohn Marino    zero indicates an instruction pattern without a known cost.  */
4813*e4b17023SJohn Marino 
4814*e4b17023SJohn Marino int
4815*e4b17023SJohn Marino insn_rtx_cost (rtx pat, bool speed)
4816*e4b17023SJohn Marino {
4817*e4b17023SJohn Marino   int i, cost;
4818*e4b17023SJohn Marino   rtx set;
4819*e4b17023SJohn Marino 
4820*e4b17023SJohn Marino   /* Extract the single set rtx from the instruction pattern.
4821*e4b17023SJohn Marino      We can't use single_set since we only have the pattern.  */
4822*e4b17023SJohn Marino   if (GET_CODE (pat) == SET)
4823*e4b17023SJohn Marino     set = pat;
4824*e4b17023SJohn Marino   else if (GET_CODE (pat) == PARALLEL)
4825*e4b17023SJohn Marino     {
4826*e4b17023SJohn Marino       set = NULL_RTX;
4827*e4b17023SJohn Marino       for (i = 0; i < XVECLEN (pat, 0); i++)
4828*e4b17023SJohn Marino 	{
4829*e4b17023SJohn Marino 	  rtx x = XVECEXP (pat, 0, i);
4830*e4b17023SJohn Marino 	  if (GET_CODE (x) == SET)
4831*e4b17023SJohn Marino 	    {
4832*e4b17023SJohn Marino 	      if (set)
4833*e4b17023SJohn Marino 		return 0;
4834*e4b17023SJohn Marino 	      set = x;
4835*e4b17023SJohn Marino 	    }
4836*e4b17023SJohn Marino 	}
4837*e4b17023SJohn Marino       if (!set)
4838*e4b17023SJohn Marino 	return 0;
4839*e4b17023SJohn Marino     }
4840*e4b17023SJohn Marino   else
4841*e4b17023SJohn Marino     return 0;
4842*e4b17023SJohn Marino 
4843*e4b17023SJohn Marino   cost = set_src_cost (SET_SRC (set), speed);
4844*e4b17023SJohn Marino   return cost > 0 ? cost : COSTS_N_INSNS (1);
4845*e4b17023SJohn Marino }
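/* A typical caller (for instance in the if-conversion pass) costs an existing
   instruction roughly like this (illustrative):

     int cost = insn_rtx_cost (PATTERN (insn),
			       optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)));

   treating a return value of 0 as "cost unknown" and falling back to a
   conservative estimate.  */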
4846*e4b17023SJohn Marino 
4847*e4b17023SJohn Marino /* Given an insn INSN and condition COND, return the condition in a
4848*e4b17023SJohn Marino    canonical form to simplify testing by callers.  Specifically:
4849*e4b17023SJohn Marino 
4850*e4b17023SJohn Marino    (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4851*e4b17023SJohn Marino    (2) Both operands will be machine operands; (cc0) will have been replaced.
4852*e4b17023SJohn Marino    (3) If an operand is a constant, it will be the second operand.
4853*e4b17023SJohn Marino    (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4854*e4b17023SJohn Marino        for GE, GEU, and LEU.
4855*e4b17023SJohn Marino 
4856*e4b17023SJohn Marino    If the condition cannot be understood, or is an inequality floating-point
4857*e4b17023SJohn Marino    comparison which needs to be reversed, 0 will be returned.
4858*e4b17023SJohn Marino 
4859*e4b17023SJohn Marino    If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4860*e4b17023SJohn Marino 
4861*e4b17023SJohn Marino    If EARLIEST is nonzero, it is a pointer to a place where the earliest
4862*e4b17023SJohn Marino    insn used in locating the condition was found.  If a replacement test
4863*e4b17023SJohn Marino    of the condition is desired, it should be placed in front of that
4864*e4b17023SJohn Marino    insn and we will be sure that the inputs are still valid.
4865*e4b17023SJohn Marino 
4866*e4b17023SJohn Marino    If WANT_REG is nonzero, we wish the condition to be relative to that
4867*e4b17023SJohn Marino    register, if possible.  Therefore, do not canonicalize the condition
4868*e4b17023SJohn Marino    further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
4869*e4b17023SJohn Marino    to be a compare to a CC mode register.
4870*e4b17023SJohn Marino 
4871*e4b17023SJohn Marino    If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4872*e4b17023SJohn Marino    and at INSN.  */
4873*e4b17023SJohn Marino 
4874*e4b17023SJohn Marino rtx
4875*e4b17023SJohn Marino canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4876*e4b17023SJohn Marino 			rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4877*e4b17023SJohn Marino {
4878*e4b17023SJohn Marino   enum rtx_code code;
4879*e4b17023SJohn Marino   rtx prev = insn;
4880*e4b17023SJohn Marino   const_rtx set;
4881*e4b17023SJohn Marino   rtx tem;
4882*e4b17023SJohn Marino   rtx op0, op1;
4883*e4b17023SJohn Marino   int reverse_code = 0;
4884*e4b17023SJohn Marino   enum machine_mode mode;
4885*e4b17023SJohn Marino   basic_block bb = BLOCK_FOR_INSN (insn);
4886*e4b17023SJohn Marino 
4887*e4b17023SJohn Marino   code = GET_CODE (cond);
4888*e4b17023SJohn Marino   mode = GET_MODE (cond);
4889*e4b17023SJohn Marino   op0 = XEXP (cond, 0);
4890*e4b17023SJohn Marino   op1 = XEXP (cond, 1);
4891*e4b17023SJohn Marino 
4892*e4b17023SJohn Marino   if (reverse)
4893*e4b17023SJohn Marino     code = reversed_comparison_code (cond, insn);
4894*e4b17023SJohn Marino   if (code == UNKNOWN)
4895*e4b17023SJohn Marino     return 0;
4896*e4b17023SJohn Marino 
4897*e4b17023SJohn Marino   if (earliest)
4898*e4b17023SJohn Marino     *earliest = insn;
4899*e4b17023SJohn Marino 
4900*e4b17023SJohn Marino   /* If we are comparing a register with zero, see if the register is set
4901*e4b17023SJohn Marino      in the previous insn to a COMPARE or a comparison operation.  Perform
4902*e4b17023SJohn Marino      the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4903*e4b17023SJohn Marino      in cse.c  */
4904*e4b17023SJohn Marino 
4905*e4b17023SJohn Marino   while ((GET_RTX_CLASS (code) == RTX_COMPARE
4906*e4b17023SJohn Marino 	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4907*e4b17023SJohn Marino 	 && op1 == CONST0_RTX (GET_MODE (op0))
4908*e4b17023SJohn Marino 	 && op0 != want_reg)
4909*e4b17023SJohn Marino     {
4910*e4b17023SJohn Marino       /* Set nonzero when we find something of interest.  */
4911*e4b17023SJohn Marino       rtx x = 0;
4912*e4b17023SJohn Marino 
4913*e4b17023SJohn Marino #ifdef HAVE_cc0
4914*e4b17023SJohn Marino       /* If comparison with cc0, import actual comparison from compare
4915*e4b17023SJohn Marino 	 insn.  */
4916*e4b17023SJohn Marino       if (op0 == cc0_rtx)
4917*e4b17023SJohn Marino 	{
4918*e4b17023SJohn Marino 	  if ((prev = prev_nonnote_insn (prev)) == 0
4919*e4b17023SJohn Marino 	      || !NONJUMP_INSN_P (prev)
4920*e4b17023SJohn Marino 	      || (set = single_set (prev)) == 0
4921*e4b17023SJohn Marino 	      || SET_DEST (set) != cc0_rtx)
4922*e4b17023SJohn Marino 	    return 0;
4923*e4b17023SJohn Marino 
4924*e4b17023SJohn Marino 	  op0 = SET_SRC (set);
4925*e4b17023SJohn Marino 	  op1 = CONST0_RTX (GET_MODE (op0));
4926*e4b17023SJohn Marino 	  if (earliest)
4927*e4b17023SJohn Marino 	    *earliest = prev;
4928*e4b17023SJohn Marino 	}
4929*e4b17023SJohn Marino #endif
4930*e4b17023SJohn Marino 
4931*e4b17023SJohn Marino       /* If this is a COMPARE, pick up the two things being compared.  */
4932*e4b17023SJohn Marino       if (GET_CODE (op0) == COMPARE)
4933*e4b17023SJohn Marino 	{
4934*e4b17023SJohn Marino 	  op1 = XEXP (op0, 1);
4935*e4b17023SJohn Marino 	  op0 = XEXP (op0, 0);
4936*e4b17023SJohn Marino 	  continue;
4937*e4b17023SJohn Marino 	}
4938*e4b17023SJohn Marino       else if (!REG_P (op0))
4939*e4b17023SJohn Marino 	break;
4940*e4b17023SJohn Marino 
4941*e4b17023SJohn Marino       /* Go back to the previous insn.  Stop if it is not an INSN.  We also
4942*e4b17023SJohn Marino 	 stop if it isn't a single set or if it has a REG_INC note because
4943*e4b17023SJohn Marino 	 we don't want to bother dealing with it.  */
4944*e4b17023SJohn Marino 
4945*e4b17023SJohn Marino       prev = prev_nonnote_nondebug_insn (prev);
4946*e4b17023SJohn Marino 
4947*e4b17023SJohn Marino       if (prev == 0
4948*e4b17023SJohn Marino 	  || !NONJUMP_INSN_P (prev)
4949*e4b17023SJohn Marino 	  || FIND_REG_INC_NOTE (prev, NULL_RTX)
4950*e4b17023SJohn Marino 	  /* In cfglayout mode, there do not have to be labels at the
4951*e4b17023SJohn Marino 	     beginning of a block, or jumps at the end, so the previous
4952*e4b17023SJohn Marino 	     conditions would not stop us when we reach bb boundary.  */
4953*e4b17023SJohn Marino 	  || BLOCK_FOR_INSN (prev) != bb)
4954*e4b17023SJohn Marino 	break;
4955*e4b17023SJohn Marino 
4956*e4b17023SJohn Marino       set = set_of (op0, prev);
4957*e4b17023SJohn Marino 
4958*e4b17023SJohn Marino       if (set
4959*e4b17023SJohn Marino 	  && (GET_CODE (set) != SET
4960*e4b17023SJohn Marino 	      || !rtx_equal_p (SET_DEST (set), op0)))
4961*e4b17023SJohn Marino 	break;
4962*e4b17023SJohn Marino 
4963*e4b17023SJohn Marino       /* If this is setting OP0, get what it sets it to if it looks
4964*e4b17023SJohn Marino 	 relevant.  */
4965*e4b17023SJohn Marino       if (set)
4966*e4b17023SJohn Marino 	{
4967*e4b17023SJohn Marino 	  enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4968*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE
4969*e4b17023SJohn Marino 	  REAL_VALUE_TYPE fsfv;
4970*e4b17023SJohn Marino #endif
4971*e4b17023SJohn Marino 
4972*e4b17023SJohn Marino 	  /* ??? We may not combine comparisons done in a CCmode with
4973*e4b17023SJohn Marino 	     comparisons not done in a CCmode.  This is to aid targets
4974*e4b17023SJohn Marino 	     like Alpha that have an IEEE compliant EQ instruction, and
4975*e4b17023SJohn Marino 	     a non-IEEE compliant BEQ instruction.  The use of CCmode is
4976*e4b17023SJohn Marino 	     actually artificial, simply to prevent the combination, but
4977*e4b17023SJohn Marino 	     should not affect other platforms.
4978*e4b17023SJohn Marino 
4979*e4b17023SJohn Marino 	     However, we must allow VOIDmode comparisons to match either
4980*e4b17023SJohn Marino 	     CCmode or non-CCmode comparison, because some ports have
4981*e4b17023SJohn Marino 	     modeless comparisons inside branch patterns.
4982*e4b17023SJohn Marino 
4983*e4b17023SJohn Marino 	     ??? This mode check should perhaps look more like the mode check
4984*e4b17023SJohn Marino 	     in simplify_comparison in combine.  */
4985*e4b17023SJohn Marino 
4986*e4b17023SJohn Marino 	  if ((GET_CODE (SET_SRC (set)) == COMPARE
4987*e4b17023SJohn Marino 	       || (((code == NE
4988*e4b17023SJohn Marino 		     || (code == LT
4989*e4b17023SJohn Marino 			 && val_signbit_known_set_p (inner_mode,
4990*e4b17023SJohn Marino 						     STORE_FLAG_VALUE))
4991*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE
4992*e4b17023SJohn Marino 		     || (code == LT
4993*e4b17023SJohn Marino 			 && SCALAR_FLOAT_MODE_P (inner_mode)
4994*e4b17023SJohn Marino 			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4995*e4b17023SJohn Marino 			     REAL_VALUE_NEGATIVE (fsfv)))
4996*e4b17023SJohn Marino #endif
4997*e4b17023SJohn Marino 		     ))
4998*e4b17023SJohn Marino 		   && COMPARISON_P (SET_SRC (set))))
4999*e4b17023SJohn Marino 	      && (((GET_MODE_CLASS (mode) == MODE_CC)
5000*e4b17023SJohn Marino 		   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
5001*e4b17023SJohn Marino 		  || mode == VOIDmode || inner_mode == VOIDmode))
5002*e4b17023SJohn Marino 	    x = SET_SRC (set);
5003*e4b17023SJohn Marino 	  else if (((code == EQ
5004*e4b17023SJohn Marino 		     || (code == GE
5005*e4b17023SJohn Marino 			 && val_signbit_known_set_p (inner_mode,
5006*e4b17023SJohn Marino 						     STORE_FLAG_VALUE))
5007*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE
5008*e4b17023SJohn Marino 		     || (code == GE
5009*e4b17023SJohn Marino 			 && SCALAR_FLOAT_MODE_P (inner_mode)
5010*e4b17023SJohn Marino 			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5011*e4b17023SJohn Marino 			     REAL_VALUE_NEGATIVE (fsfv)))
5012*e4b17023SJohn Marino #endif
5013*e4b17023SJohn Marino 		     ))
5014*e4b17023SJohn Marino 		   && COMPARISON_P (SET_SRC (set))
5015*e4b17023SJohn Marino 		   && (((GET_MODE_CLASS (mode) == MODE_CC)
5016*e4b17023SJohn Marino 			== (GET_MODE_CLASS (inner_mode) == MODE_CC))
5017*e4b17023SJohn Marino 		       || mode == VOIDmode || inner_mode == VOIDmode))
5018*e4b17023SJohn Marino 
5019*e4b17023SJohn Marino 	    {
5020*e4b17023SJohn Marino 	      reverse_code = 1;
5021*e4b17023SJohn Marino 	      x = SET_SRC (set);
5022*e4b17023SJohn Marino 	    }
5023*e4b17023SJohn Marino 	  else
5024*e4b17023SJohn Marino 	    break;
5025*e4b17023SJohn Marino 	}
5026*e4b17023SJohn Marino 
5027*e4b17023SJohn Marino       else if (reg_set_p (op0, prev))
5028*e4b17023SJohn Marino 	/* If this sets OP0, but not directly, we have to give up.  */
5029*e4b17023SJohn Marino 	break;
5030*e4b17023SJohn Marino 
5031*e4b17023SJohn Marino       if (x)
5032*e4b17023SJohn Marino 	{
5033*e4b17023SJohn Marino 	  /* If the caller is expecting the condition to be valid at INSN,
5034*e4b17023SJohn Marino 	     make sure X doesn't change before INSN.  */
5035*e4b17023SJohn Marino 	  if (valid_at_insn_p)
5036*e4b17023SJohn Marino 	    if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
5037*e4b17023SJohn Marino 	      break;
5038*e4b17023SJohn Marino 	  if (COMPARISON_P (x))
5039*e4b17023SJohn Marino 	    code = GET_CODE (x);
5040*e4b17023SJohn Marino 	  if (reverse_code)
5041*e4b17023SJohn Marino 	    {
5042*e4b17023SJohn Marino 	      code = reversed_comparison_code (x, prev);
5043*e4b17023SJohn Marino 	      if (code == UNKNOWN)
5044*e4b17023SJohn Marino 		return 0;
5045*e4b17023SJohn Marino 	      reverse_code = 0;
5046*e4b17023SJohn Marino 	    }
5047*e4b17023SJohn Marino 
5048*e4b17023SJohn Marino 	  op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5049*e4b17023SJohn Marino 	  if (earliest)
5050*e4b17023SJohn Marino 	    *earliest = prev;
5051*e4b17023SJohn Marino 	}
5052*e4b17023SJohn Marino     }
5053*e4b17023SJohn Marino 
5054*e4b17023SJohn Marino   /* If constant is first, put it last.  */
5055*e4b17023SJohn Marino   if (CONSTANT_P (op0))
5056*e4b17023SJohn Marino     code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
5057*e4b17023SJohn Marino 
5058*e4b17023SJohn Marino   /* If OP0 is the result of a comparison, we weren't able to find what
5059*e4b17023SJohn Marino      was really being compared, so fail.  */
5060*e4b17023SJohn Marino   if (!allow_cc_mode
5061*e4b17023SJohn Marino       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5062*e4b17023SJohn Marino     return 0;
5063*e4b17023SJohn Marino 
5064*e4b17023SJohn Marino   /* Canonicalize any ordered comparison with integers involving equality
5065*e4b17023SJohn Marino      if we can do computations in the relevant mode and we do not
5066*e4b17023SJohn Marino      overflow.  */
5067*e4b17023SJohn Marino 
5068*e4b17023SJohn Marino   if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
5069*e4b17023SJohn Marino       && CONST_INT_P (op1)
5070*e4b17023SJohn Marino       && GET_MODE (op0) != VOIDmode
5071*e4b17023SJohn Marino       && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
5072*e4b17023SJohn Marino     {
5073*e4b17023SJohn Marino       HOST_WIDE_INT const_val = INTVAL (op1);
5074*e4b17023SJohn Marino       unsigned HOST_WIDE_INT uconst_val = const_val;
5075*e4b17023SJohn Marino       unsigned HOST_WIDE_INT max_val
5076*e4b17023SJohn Marino 	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
5077*e4b17023SJohn Marino 
5078*e4b17023SJohn Marino       switch (code)
5079*e4b17023SJohn Marino 	{
5080*e4b17023SJohn Marino 	case LE:
5081*e4b17023SJohn Marino 	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
5082*e4b17023SJohn Marino 	    code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
5083*e4b17023SJohn Marino 	  break;
5084*e4b17023SJohn Marino 
5085*e4b17023SJohn Marino 	/* When cross-compiling, const_val might be sign-extended from
5086*e4b17023SJohn Marino 	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
5087*e4b17023SJohn Marino 	case GE:
5088*e4b17023SJohn Marino 	  if ((const_val & max_val)
5089*e4b17023SJohn Marino 	      != ((unsigned HOST_WIDE_INT) 1
5090*e4b17023SJohn Marino 		  << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
5091*e4b17023SJohn Marino 	    code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
5092*e4b17023SJohn Marino 	  break;
5093*e4b17023SJohn Marino 
5094*e4b17023SJohn Marino 	case LEU:
5095*e4b17023SJohn Marino 	  if (uconst_val < max_val)
5096*e4b17023SJohn Marino 	    code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
5097*e4b17023SJohn Marino 	  break;
5098*e4b17023SJohn Marino 
5099*e4b17023SJohn Marino 	case GEU:
5100*e4b17023SJohn Marino 	  if (uconst_val != 0)
5101*e4b17023SJohn Marino 	    code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
5102*e4b17023SJohn Marino 	  break;
5103*e4b17023SJohn Marino 
5104*e4b17023SJohn Marino 	default:
5105*e4b17023SJohn Marino 	  break;
5106*e4b17023SJohn Marino 	}
5107*e4b17023SJohn Marino     }
5108*e4b17023SJohn Marino 
5109*e4b17023SJohn Marino   /* Never return CC0; return zero instead.  */
5110*e4b17023SJohn Marino   if (CC0_P (op0))
5111*e4b17023SJohn Marino     return 0;
5112*e4b17023SJohn Marino 
5113*e4b17023SJohn Marino   return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
5114*e4b17023SJohn Marino }
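/* For example, rule (4) in the comment above rewrites
   (le (reg) (const_int 4)) as (lt (reg) (const_int 5)) and
   (geu (reg) (const_int 7)) as (gtu (reg) (const_int 6)), as long as the
   adjusted constant does not wrap around in the mode of the comparison.  */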
5115*e4b17023SJohn Marino 
5116*e4b17023SJohn Marino /* Given a jump insn JUMP, return the condition that will cause it to branch
5117*e4b17023SJohn Marino    to its JUMP_LABEL.  If the condition cannot be understood, or is an
5118*e4b17023SJohn Marino    inequality floating-point comparison which needs to be reversed, 0 will
5119*e4b17023SJohn Marino    be returned.
5120*e4b17023SJohn Marino 
5121*e4b17023SJohn Marino    If EARLIEST is nonzero, it is a pointer to a place where the earliest
5122*e4b17023SJohn Marino    insn used in locating the condition was found.  If a replacement test
5123*e4b17023SJohn Marino    of the condition is desired, it should be placed in front of that
5124*e4b17023SJohn Marino    insn and we will be sure that the inputs are still valid.  If EARLIEST
5125*e4b17023SJohn Marino    is null, the returned condition will be valid at INSN.
5126*e4b17023SJohn Marino 
5127*e4b17023SJohn Marino    If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
5128*e4b17023SJohn Marino    compare CC mode register.
5129*e4b17023SJohn Marino 
5130*e4b17023SJohn Marino    VALID_AT_INSN_P is the same as for canonicalize_condition.  */
5131*e4b17023SJohn Marino 
5132*e4b17023SJohn Marino rtx
5133*e4b17023SJohn Marino get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
5134*e4b17023SJohn Marino {
5135*e4b17023SJohn Marino   rtx cond;
5136*e4b17023SJohn Marino   int reverse;
5137*e4b17023SJohn Marino   rtx set;
5138*e4b17023SJohn Marino 
5139*e4b17023SJohn Marino   /* If this is not a standard conditional jump, we can't parse it.  */
5140*e4b17023SJohn Marino   if (!JUMP_P (jump)
5141*e4b17023SJohn Marino       || ! any_condjump_p (jump))
5142*e4b17023SJohn Marino     return 0;
5143*e4b17023SJohn Marino   set = pc_set (jump);
5144*e4b17023SJohn Marino 
5145*e4b17023SJohn Marino   cond = XEXP (SET_SRC (set), 0);
5146*e4b17023SJohn Marino 
5147*e4b17023SJohn Marino   /* If this branches to JUMP_LABEL when the condition is false, reverse
5148*e4b17023SJohn Marino      the condition.  */
5149*e4b17023SJohn Marino   reverse
5150*e4b17023SJohn Marino     = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
5151*e4b17023SJohn Marino       && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
5152*e4b17023SJohn Marino 
5153*e4b17023SJohn Marino   return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5154*e4b17023SJohn Marino 				 allow_cc_mode, valid_at_insn_p);
5155*e4b17023SJohn Marino }
5156*e4b17023SJohn Marino 
5157*e4b17023SJohn Marino /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5158*e4b17023SJohn Marino    TARGET_MODE_REP_EXTENDED.
5159*e4b17023SJohn Marino 
5160*e4b17023SJohn Marino    Note that we assume that the property of
5161*e4b17023SJohn Marino    TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
5162*e4b17023SJohn Marino    narrower than mode B.  I.e., if A is a mode narrower than B then in
5163*e4b17023SJohn Marino    order to be able to operate on it in mode B, mode A needs to
5164*e4b17023SJohn Marino    satisfy the requirements set by the representation of mode B.  */
5165*e4b17023SJohn Marino 
5166*e4b17023SJohn Marino static void
5167*e4b17023SJohn Marino init_num_sign_bit_copies_in_rep (void)
5168*e4b17023SJohn Marino {
5169*e4b17023SJohn Marino   enum machine_mode mode, in_mode;
5170*e4b17023SJohn Marino 
5171*e4b17023SJohn Marino   for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5172*e4b17023SJohn Marino        in_mode = GET_MODE_WIDER_MODE (mode))
5173*e4b17023SJohn Marino     for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5174*e4b17023SJohn Marino 	 mode = GET_MODE_WIDER_MODE (mode))
5175*e4b17023SJohn Marino       {
5176*e4b17023SJohn Marino 	enum machine_mode i;
5177*e4b17023SJohn Marino 
5178*e4b17023SJohn Marino 	/* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5179*e4b17023SJohn Marino 	   extends to the next widest mode.  */
5180*e4b17023SJohn Marino 	gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5181*e4b17023SJohn Marino 		    || GET_MODE_WIDER_MODE (mode) == in_mode);
5182*e4b17023SJohn Marino 
5183*e4b17023SJohn Marino 	/* We are in in_mode.  Count how many bits outside of mode
5184*e4b17023SJohn Marino 	   have to be copies of the sign-bit.  */
5185*e4b17023SJohn Marino 	for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5186*e4b17023SJohn Marino 	  {
5187*e4b17023SJohn Marino 	    enum machine_mode wider = GET_MODE_WIDER_MODE (i);
5188*e4b17023SJohn Marino 
5189*e4b17023SJohn Marino 	    if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5190*e4b17023SJohn Marino 		/* We can only check sign-bit copies starting from the
5191*e4b17023SJohn Marino 		   top-bit.  In order to be able to check the bits we
5192*e4b17023SJohn Marino 		   have already seen we pretend that subsequent bits
5193*e4b17023SJohn Marino 		   have to be sign-bit copies too.  */
5194*e4b17023SJohn Marino 		|| num_sign_bit_copies_in_rep [in_mode][mode])
5195*e4b17023SJohn Marino 	      num_sign_bit_copies_in_rep [in_mode][mode]
5196*e4b17023SJohn Marino 		+= GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
5197*e4b17023SJohn Marino 	  }
5198*e4b17023SJohn Marino       }
5199*e4b17023SJohn Marino }
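/* For example, on a target such as 64-bit MIPS, where
   TARGET_MODE_REP_EXTENDED (SImode, DImode) is SIGN_EXTEND, the loops above
   record num_sign_bit_copies_in_rep[DImode][SImode] == 32: an SImode value
   held in a DImode register keeps its upper 32 bits equal to bit 31.  */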
5200*e4b17023SJohn Marino 
5201*e4b17023SJohn Marino /* Suppose that truncation from the machine mode of X to MODE is not a
5202*e4b17023SJohn Marino    no-op.  See if there is anything special about X so that we can
5203*e4b17023SJohn Marino    assume it already contains a truncated value of MODE.  */
5204*e4b17023SJohn Marino 
5205*e4b17023SJohn Marino bool
5206*e4b17023SJohn Marino truncated_to_mode (enum machine_mode mode, const_rtx x)
5207*e4b17023SJohn Marino {
5208*e4b17023SJohn Marino   /* This register has already been used in MODE without explicit
5209*e4b17023SJohn Marino      truncation.  */
5210*e4b17023SJohn Marino   if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5211*e4b17023SJohn Marino     return true;
5212*e4b17023SJohn Marino 
5213*e4b17023SJohn Marino   /* See if we already satisfy the requirements of MODE.  If yes we
5214*e4b17023SJohn Marino      can just switch to MODE.  */
5215*e4b17023SJohn Marino   if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5216*e4b17023SJohn Marino       && (num_sign_bit_copies (x, GET_MODE (x))
5217*e4b17023SJohn Marino 	  >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5218*e4b17023SJohn Marino     return true;
5219*e4b17023SJohn Marino 
5220*e4b17023SJohn Marino   return false;
5221*e4b17023SJohn Marino }
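/* Continuing the example above: with num_sign_bit_copies_in_rep[DImode][SImode]
   equal to 32, a DImode X with at least 33 sign-bit copies already satisfies
   the SImode representation, so truncating it is a no-op.  */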
5222*e4b17023SJohn Marino 
5223*e4b17023SJohn Marino /* Initialize non_rtx_starting_operands, which is used to speed up
5224*e4b17023SJohn Marino    for_each_rtx.  */
5225*e4b17023SJohn Marino void
5226*e4b17023SJohn Marino init_rtlanal (void)
5227*e4b17023SJohn Marino {
5228*e4b17023SJohn Marino   int i;
5229*e4b17023SJohn Marino   for (i = 0; i < NUM_RTX_CODE; i++)
5230*e4b17023SJohn Marino     {
5231*e4b17023SJohn Marino       const char *format = GET_RTX_FORMAT (i);
5232*e4b17023SJohn Marino       const char *first = strpbrk (format, "eEV");
5233*e4b17023SJohn Marino       non_rtx_starting_operands[i] = first ? first - format : -1;
5234*e4b17023SJohn Marino     }
5235*e4b17023SJohn Marino 
5236*e4b17023SJohn Marino   init_num_sign_bit_copies_in_rep ();
5237*e4b17023SJohn Marino }
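/* For example, the RTX format string of PLUS is "ee", so its entry becomes 0
   (its very first operand is an rtx), while CONST_INT's format "w" contains
   none of 'e', 'E' or 'V' and its entry becomes -1.  */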
5238*e4b17023SJohn Marino 
5239*e4b17023SJohn Marino /* Check whether this is a constant pool constant.  */
5240*e4b17023SJohn Marino bool
5241*e4b17023SJohn Marino constant_pool_constant_p (rtx x)
5242*e4b17023SJohn Marino {
5243*e4b17023SJohn Marino   x = avoid_constant_pool_reference (x);
5244*e4b17023SJohn Marino   return GET_CODE (x) == CONST_DOUBLE;
5245*e4b17023SJohn Marino }
5246*e4b17023SJohn Marino 
5247*e4b17023SJohn Marino /* If M is a bitmask that selects a field of low-order bits within an item but
5248*e4b17023SJohn Marino    not the entire word, return the length of the field.  Return -1 otherwise.
5249*e4b17023SJohn Marino    M is used in machine mode MODE.  */
5250*e4b17023SJohn Marino 
5251*e4b17023SJohn Marino int
5252*e4b17023SJohn Marino low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
5253*e4b17023SJohn Marino {
5254*e4b17023SJohn Marino   if (mode != VOIDmode)
5255*e4b17023SJohn Marino     {
5256*e4b17023SJohn Marino       if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
5257*e4b17023SJohn Marino 	return -1;
5258*e4b17023SJohn Marino       m &= GET_MODE_MASK (mode);
5259*e4b17023SJohn Marino     }
5260*e4b17023SJohn Marino 
5261*e4b17023SJohn Marino   return exact_log2 (m + 1);
5262*e4b17023SJohn Marino }
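/* For example, low_bitmask_len (SImode, 0xff) is 8, while
   low_bitmask_len (SImode, 0x18) is -1 because 0x18 + 1 is not a power of
   two.  */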
5263