xref: /dflybsd-src/contrib/gcc-8.0/gcc/rtlanal.c (revision 95059079af47f9a66a175f374f2da1a5020e3255)
138fd1498Szrj /* Analyze RTL for GNU compiler.
238fd1498Szrj    Copyright (C) 1987-2018 Free Software Foundation, Inc.
338fd1498Szrj 
438fd1498Szrj This file is part of GCC.
538fd1498Szrj 
638fd1498Szrj GCC is free software; you can redistribute it and/or modify it under
738fd1498Szrj the terms of the GNU General Public License as published by the Free
838fd1498Szrj Software Foundation; either version 3, or (at your option) any later
938fd1498Szrj version.
1038fd1498Szrj 
1138fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT ANY
1238fd1498Szrj WARRANTY; without even the implied warranty of MERCHANTABILITY or
1338fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1438fd1498Szrj for more details.
1538fd1498Szrj 
1638fd1498Szrj You should have received a copy of the GNU General Public License
1738fd1498Szrj along with GCC; see the file COPYING3.  If not see
1838fd1498Szrj <http://www.gnu.org/licenses/>.  */
1938fd1498Szrj 
2038fd1498Szrj 
2138fd1498Szrj #include "config.h"
2238fd1498Szrj #include "system.h"
2338fd1498Szrj #include "coretypes.h"
2438fd1498Szrj #include "backend.h"
2538fd1498Szrj #include "target.h"
2638fd1498Szrj #include "rtl.h"
2738fd1498Szrj #include "tree.h"
2838fd1498Szrj #include "predict.h"
2938fd1498Szrj #include "df.h"
3038fd1498Szrj #include "memmodel.h"
3138fd1498Szrj #include "tm_p.h"
3238fd1498Szrj #include "insn-config.h"
3338fd1498Szrj #include "regs.h"
3438fd1498Szrj #include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
3538fd1498Szrj #include "recog.h"
3638fd1498Szrj #include "addresses.h"
3738fd1498Szrj #include "rtl-iter.h"
3838fd1498Szrj 
3938fd1498Szrj /* Forward declarations */
4038fd1498Szrj static void set_of_1 (rtx, const_rtx, void *);
4138fd1498Szrj static bool covers_regno_p (const_rtx, unsigned int);
4238fd1498Szrj static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
4338fd1498Szrj static int computed_jump_p_1 (const_rtx);
4438fd1498Szrj static void parms_set (rtx, const_rtx, void *);
4538fd1498Szrj 
4638fd1498Szrj static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, scalar_int_mode,
4738fd1498Szrj                                                    const_rtx, machine_mode,
4838fd1498Szrj                                                    unsigned HOST_WIDE_INT);
4938fd1498Szrj static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, scalar_int_mode,
5038fd1498Szrj 					     const_rtx, machine_mode,
5138fd1498Szrj                                              unsigned HOST_WIDE_INT);
5238fd1498Szrj static unsigned int cached_num_sign_bit_copies (const_rtx, scalar_int_mode,
5338fd1498Szrj 						const_rtx, machine_mode,
5438fd1498Szrj                                                 unsigned int);
5538fd1498Szrj static unsigned int num_sign_bit_copies1 (const_rtx, scalar_int_mode,
5638fd1498Szrj 					  const_rtx, machine_mode,
5738fd1498Szrj 					  unsigned int);
5838fd1498Szrj 
5938fd1498Szrj rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
6038fd1498Szrj rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];
6138fd1498Szrj 
6238fd1498Szrj /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
6338fd1498Szrj    If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
6438fd1498Szrj    SIGN_EXTEND then while narrowing we also have to enforce the
6538fd1498Szrj    representation and sign-extend the value to mode DESTINATION_REP.
6638fd1498Szrj 
6738fd1498Szrj    If the value is already sign-extended to DESTINATION_REP mode we
6838fd1498Szrj    can just switch to DESTINATION mode on it.  For each pair of
6938fd1498Szrj    integral modes SOURCE and DESTINATION, when truncating from SOURCE
7038fd1498Szrj    to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
7138fd1498Szrj    contains the number of high-order bits in SOURCE that have to be
7238fd1498Szrj    copies of the sign-bit so that we can do this mode-switch to
7338fd1498Szrj    DESTINATION.  */
7438fd1498Szrj 
7538fd1498Szrj static unsigned int
7638fd1498Szrj num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
7738fd1498Szrj 
7838fd1498Szrj /* Store X into index I of ARRAY.  ARRAY is known to have at least I
7938fd1498Szrj    elements.  Return the new base of ARRAY.  */
8038fd1498Szrj 
template <typename T>
typename T::value_type *
generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
						  value_type *base,
						  size_t i, value_type x)
{
  /* Fast path: we are still using the fixed-size on-stack buffer.  */
  if (base == array.stack)
    {
      if (i < LOCAL_ELEMS)
	{
	  base[i] = x;
	  return base;
	}
      /* The stack buffer is exactly full; spill everything to the heap
	 vector and append X there.  */
      gcc_checking_assert (i == LOCAL_ELEMS);
      /* A previous iteration might also have moved from the stack to the
	 heap, in which case the heap array will already be big enough.  */
      if (vec_safe_length (array.heap) <= i)
	vec_safe_grow (array.heap, i + 1);
      base = array.heap->address ();
      memcpy (base, array.stack, sizeof (array.stack));
      base[LOCAL_ELEMS] = x;
      return base;
    }
  /* Already on the heap.  Either store into an existing slot...  */
  unsigned int length = array.heap->length ();
  if (length > i)
    {
      gcc_checking_assert (base == array.heap->address ());
      base[i] = x;
      return base;
    }
  else
    {
      /* ...or append, which may reallocate, so re-fetch the base.  */
      gcc_checking_assert (i == length);
      vec_safe_push (array.heap, x);
      return array.heap->address ();
    }
}
11838fd1498Szrj 
11938fd1498Szrj /* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
12038fd1498Szrj    number of elements added to the worklist.  */
12138fd1498Szrj 
template <typename T>
size_t
generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
						    value_type *base,
						    size_t end, rtx_type x)
{
  enum rtx_code code = GET_CODE (x);
  /* Format string: 'e' marks an rtx operand, 'E' an rtx vector operand.  */
  const char *format = GET_RTX_FORMAT (code);
  size_t orig_end = end;
  if (__builtin_expect (INSN_P (x), false))
    {
      /* Put the pattern at the top of the queue, since that's what
	 we're likely to want most.  It also allows for the SEQUENCE
	 code below.  (Iterating backwards queues operand 0 last, i.e.
	 nearest the front of the worklist.)  */
      for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
	if (format[i] == 'e')
	  {
	    value_type subx = T::get_value (x->u.fld[i].rt_rtx);
	    if (__builtin_expect (end < LOCAL_ELEMS, true))
	      base[end++] = subx;
	    else
	      base = add_single_to_queue (array, base, end++, subx);
	  }
    }
  else
    for (int i = 0; format[i]; ++i)
      if (format[i] == 'e')
	{
	  value_type subx = T::get_value (x->u.fld[i].rt_rtx);
	  if (__builtin_expect (end < LOCAL_ELEMS, true))
	    base[end++] = subx;
	  else
	    base = add_single_to_queue (array, base, end++, subx);
	}
      else if (format[i] == 'E')
	{
	  unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
	  rtx *vec = x->u.fld[i].rt_rtvec->elem;
	  if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
	    for (unsigned int j = 0; j < length; j++)
	      base[end++] = T::get_value (vec[j]);
	  else
	    for (unsigned int j = 0; j < length; j++)
	      base = add_single_to_queue (array, base, end++,
					  T::get_value (vec[j]));
	  if (code == SEQUENCE && end == length)
	    /* If the subrtxes of the sequence fill the entire array then
	       we know that no other parts of a containing insn are queued.
	       The caller is therefore iterating over the sequence as a
	       PATTERN (...), so we also want the patterns of the
	       subinstructions.  */
	    for (unsigned int j = 0; j < length; j++)
	      {
		typename T::rtx_type x = T::get_rtx (base[j]);
		if (INSN_P (x))
		  base[j] = T::get_value (PATTERN (x));
	      }
	}
  return end - orig_end;
}
18238fd1498Szrj 
18338fd1498Szrj template <typename T>
18438fd1498Szrj void
free_array(array_type & array)18538fd1498Szrj generic_subrtx_iterator <T>::free_array (array_type &array)
18638fd1498Szrj {
18738fd1498Szrj   vec_free (array.heap);
18838fd1498Szrj }
18938fd1498Szrj 
19038fd1498Szrj template <typename T>
19138fd1498Szrj const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;
19238fd1498Szrj 
19338fd1498Szrj template class generic_subrtx_iterator <const_rtx_accessor>;
19438fd1498Szrj template class generic_subrtx_iterator <rtx_var_accessor>;
19538fd1498Szrj template class generic_subrtx_iterator <rtx_ptr_accessor>;
19638fd1498Szrj 
19738fd1498Szrj /* Return 1 if the value of X is unstable
19838fd1498Szrj    (would be different at a different point in the program).
19938fd1498Szrj    The frame pointer, arg pointer, etc. are considered stable
20038fd1498Szrj    (within one function) and so is anything marked `unchanging'.  */
20138fd1498Szrj 
20238fd1498Szrj int
rtx_unstable_p(const_rtx x)20338fd1498Szrj rtx_unstable_p (const_rtx x)
20438fd1498Szrj {
20538fd1498Szrj   const RTX_CODE code = GET_CODE (x);
20638fd1498Szrj   int i;
20738fd1498Szrj   const char *fmt;
20838fd1498Szrj 
20938fd1498Szrj   switch (code)
21038fd1498Szrj     {
21138fd1498Szrj     case MEM:
21238fd1498Szrj       return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
21338fd1498Szrj 
21438fd1498Szrj     case CONST:
21538fd1498Szrj     CASE_CONST_ANY:
21638fd1498Szrj     case SYMBOL_REF:
21738fd1498Szrj     case LABEL_REF:
21838fd1498Szrj       return 0;
21938fd1498Szrj 
22038fd1498Szrj     case REG:
22138fd1498Szrj       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
22238fd1498Szrj       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
22338fd1498Szrj 	  /* The arg pointer varies if it is not a fixed register.  */
22438fd1498Szrj 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
22538fd1498Szrj 	return 0;
22638fd1498Szrj       /* ??? When call-clobbered, the value is stable modulo the restore
22738fd1498Szrj 	 that must happen after a call.  This currently screws up local-alloc
22838fd1498Szrj 	 into believing that the restore is not needed.  */
22938fd1498Szrj       if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
23038fd1498Szrj 	return 0;
23138fd1498Szrj       return 1;
23238fd1498Szrj 
23338fd1498Szrj     case ASM_OPERANDS:
23438fd1498Szrj       if (MEM_VOLATILE_P (x))
23538fd1498Szrj 	return 1;
23638fd1498Szrj 
23738fd1498Szrj       /* Fall through.  */
23838fd1498Szrj 
23938fd1498Szrj     default:
24038fd1498Szrj       break;
24138fd1498Szrj     }
24238fd1498Szrj 
24338fd1498Szrj   fmt = GET_RTX_FORMAT (code);
24438fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
24538fd1498Szrj     if (fmt[i] == 'e')
24638fd1498Szrj       {
24738fd1498Szrj 	if (rtx_unstable_p (XEXP (x, i)))
24838fd1498Szrj 	  return 1;
24938fd1498Szrj       }
25038fd1498Szrj     else if (fmt[i] == 'E')
25138fd1498Szrj       {
25238fd1498Szrj 	int j;
25338fd1498Szrj 	for (j = 0; j < XVECLEN (x, i); j++)
25438fd1498Szrj 	  if (rtx_unstable_p (XVECEXP (x, i, j)))
25538fd1498Szrj 	    return 1;
25638fd1498Szrj       }
25738fd1498Szrj 
25838fd1498Szrj   return 0;
25938fd1498Szrj }
26038fd1498Szrj 
26138fd1498Szrj /* Return 1 if X has a value that can vary even between two
26238fd1498Szrj    executions of the program.  0 means X can be compared reliably
26338fd1498Szrj    against certain constants or near-constants.
26438fd1498Szrj    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
26538fd1498Szrj    zero, we are slightly more conservative.
26638fd1498Szrj    The frame pointer and the arg pointer are considered constant.  */
26738fd1498Szrj 
26838fd1498Szrj bool
rtx_varies_p(const_rtx x,bool for_alias)26938fd1498Szrj rtx_varies_p (const_rtx x, bool for_alias)
27038fd1498Szrj {
27138fd1498Szrj   RTX_CODE code;
27238fd1498Szrj   int i;
27338fd1498Szrj   const char *fmt;
27438fd1498Szrj 
27538fd1498Szrj   if (!x)
27638fd1498Szrj     return 0;
27738fd1498Szrj 
27838fd1498Szrj   code = GET_CODE (x);
27938fd1498Szrj   switch (code)
28038fd1498Szrj     {
28138fd1498Szrj     case MEM:
28238fd1498Szrj       return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
28338fd1498Szrj 
28438fd1498Szrj     case CONST:
28538fd1498Szrj     CASE_CONST_ANY:
28638fd1498Szrj     case SYMBOL_REF:
28738fd1498Szrj     case LABEL_REF:
28838fd1498Szrj       return 0;
28938fd1498Szrj 
29038fd1498Szrj     case REG:
29138fd1498Szrj       /* Note that we have to test for the actual rtx used for the frame
29238fd1498Szrj 	 and arg pointers and not just the register number in case we have
29338fd1498Szrj 	 eliminated the frame and/or arg pointer and are using it
29438fd1498Szrj 	 for pseudos.  */
29538fd1498Szrj       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
29638fd1498Szrj 	  /* The arg pointer varies if it is not a fixed register.  */
29738fd1498Szrj 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
29838fd1498Szrj 	return 0;
29938fd1498Szrj       if (x == pic_offset_table_rtx
30038fd1498Szrj 	  /* ??? When call-clobbered, the value is stable modulo the restore
30138fd1498Szrj 	     that must happen after a call.  This currently screws up
30238fd1498Szrj 	     local-alloc into believing that the restore is not needed, so we
30338fd1498Szrj 	     must return 0 only if we are called from alias analysis.  */
30438fd1498Szrj 	  && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
30538fd1498Szrj 	return 0;
30638fd1498Szrj       return 1;
30738fd1498Szrj 
30838fd1498Szrj     case LO_SUM:
30938fd1498Szrj       /* The operand 0 of a LO_SUM is considered constant
31038fd1498Szrj 	 (in fact it is related specifically to operand 1)
31138fd1498Szrj 	 during alias analysis.  */
31238fd1498Szrj       return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
31338fd1498Szrj 	     || rtx_varies_p (XEXP (x, 1), for_alias);
31438fd1498Szrj 
31538fd1498Szrj     case ASM_OPERANDS:
31638fd1498Szrj       if (MEM_VOLATILE_P (x))
31738fd1498Szrj 	return 1;
31838fd1498Szrj 
31938fd1498Szrj       /* Fall through.  */
32038fd1498Szrj 
32138fd1498Szrj     default:
32238fd1498Szrj       break;
32338fd1498Szrj     }
32438fd1498Szrj 
32538fd1498Szrj   fmt = GET_RTX_FORMAT (code);
32638fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
32738fd1498Szrj     if (fmt[i] == 'e')
32838fd1498Szrj       {
32938fd1498Szrj 	if (rtx_varies_p (XEXP (x, i), for_alias))
33038fd1498Szrj 	  return 1;
33138fd1498Szrj       }
33238fd1498Szrj     else if (fmt[i] == 'E')
33338fd1498Szrj       {
33438fd1498Szrj 	int j;
33538fd1498Szrj 	for (j = 0; j < XVECLEN (x, i); j++)
33638fd1498Szrj 	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
33738fd1498Szrj 	    return 1;
33838fd1498Szrj       }
33938fd1498Szrj 
34038fd1498Szrj   return 0;
34138fd1498Szrj }
34238fd1498Szrj 
/* Compute an approximation for the offset between the register
   FROM and TO for the current function, as it was at the start
   of the routine.  */

static poly_int64
get_initial_register_offset (int from, int to)
{
  /* The target's register-elimination pairs; each entry says that
     register FROM can be replaced by register TO plus an offset.  */
  static const struct elim_table_t
  {
    const int from;
    const int to;
  } table[] = ELIMINABLE_REGS;
  poly_int64 offset1, offset2;
  unsigned int i, j;

  if (to == from)
    return 0;

  /* It is not safe to call INITIAL_ELIMINATION_OFFSET before the epilogue
     is completed, but we need to give at least an estimate for the stack
     pointer based on the frame size.  */
  if (!epilogue_completed)
    {
      offset1 = crtl->outgoing_args_size + get_frame_size ();
#if !STACK_GROWS_DOWNWARD
      offset1 = - offset1;
#endif
      if (to == STACK_POINTER_REGNUM)
	return offset1;
      else if (from == STACK_POINTER_REGNUM)
	return - offset1;
      else
	return 0;
     }

  for (i = 0; i < ARRAY_SIZE (table); i++)
      if (table[i].from == from)
	{
	  if (table[i].to == to)
	    {
	      /* Direct elimination FROM -> TO.  */
	      INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					  offset1);
	      return offset1;
	    }
	  /* No direct entry; try to chain two eliminations through a
	     common intermediate register.  */
	  for (j = 0; j < ARRAY_SIZE (table); j++)
	    {
	      /* FROM -> X and TO <- X via table[j]: add the offsets.  */
	      if (table[j].to == to
		  && table[j].from == table[i].to)
		{
		  INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					      offset1);
		  INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
					      offset2);
		  return offset1 + offset2;
		}
	      /* FROM -> X and TO -> X: subtract the second offset.  */
	      if (table[j].from == to
		  && table[j].to == table[i].to)
		{
		  INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					      offset1);
		  INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
					      offset2);
		  return offset1 - offset2;
		}
	    }
	}
      else if (table[i].to == from)
	{
	  /* Same as above, but table[i] eliminates *to* FROM, so its
	     offset enters with the opposite sign.  */
	  if (table[i].from == to)
	    {
	      INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					  offset1);
	      return - offset1;
	    }
	  for (j = 0; j < ARRAY_SIZE (table); j++)
	    {
	      if (table[j].to == to
		  && table[j].from == table[i].from)
		{
		  INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					      offset1);
		  INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
					      offset2);
		  return - offset1 + offset2;
		}
	      if (table[j].from == to
		  && table[j].to == table[i].from)
		{
		  INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
					      offset1);
		  INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
					      offset2);
		  return - offset1 - offset2;
		}
	    }
	}

  /* If the requested register combination was not found,
     try a different more simple combination.  */
  if (from == ARG_POINTER_REGNUM)
    return get_initial_register_offset (HARD_FRAME_POINTER_REGNUM, to);
  else if (to == ARG_POINTER_REGNUM)
    return get_initial_register_offset (from, HARD_FRAME_POINTER_REGNUM);
  else if (from == HARD_FRAME_POINTER_REGNUM)
    return get_initial_register_offset (FRAME_POINTER_REGNUM, to);
  else if (to == HARD_FRAME_POINTER_REGNUM)
    return get_initial_register_offset (from, FRAME_POINTER_REGNUM);
  else
    return 0;
}
45338fd1498Szrj 
/* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
   references on strict alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, poly_int64 offset, poly_int64 size,
		       machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);
  /* BLKmode is the only caller-visible way to say "size unknown".  */
  gcc_checking_assert (mode == BLKmode || known_size_p (size));

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT && unaligned_mems && mode != BLKmode)
    {
      poly_int64 actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
	     the real alignment of %sp.  However, when it does this, the
	     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
	  && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
	actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (!multiple_p (actual_offset, GET_MODE_SIZE (mode)))
	return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      /* A weak symbol may resolve to address zero, so it can trap.  */
      if (SYMBOL_REF_WEAK (x))
	return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x) && !SYMBOL_REF_FUNCTION_P (x))
	{
	  tree decl;
	  poly_int64 decl_size;

	  if (maybe_lt (offset, 0))
	    return 1;
	  if (!known_size_p (size))
	    return maybe_ne (offset, 0);

	  /* If the size of the access or of the symbol is unknown,
	     assume the worst.  */
	  decl = SYMBOL_REF_DECL (x);

	  /* Else check that the access is in bounds.  TODO: restructure
	     expr_size/tree_expr_size/int_expr_size and just use the latter.  */
	  if (!decl)
	    decl_size = -1;
	  else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
	    {
	      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &decl_size))
		decl_size = -1;
	    }
	  else if (TREE_CODE (decl) == STRING_CST)
	    decl_size = TREE_STRING_LENGTH (decl);
	  else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
	    decl_size = int_size_in_bytes (TREE_TYPE (decl));
	  else
	    decl_size = -1;

	  /* The access traps unless [offset, offset+size) is known to lie
	     inside the declared object.  */
	  return (!known_size_p (decl_size) || known_eq (decl_size, 0)
		  ? maybe_ne (offset, 0)
		  : maybe_gt (offset + size, decl_size));
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
	 nonsensical offsets.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	 || x == stack_pointer_rtx
	 /* The arg pointer varies if it is not a fixed register.  */
	 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
	{
#ifdef RED_ZONE_SIZE
	  poly_int64 red_zone_size = RED_ZONE_SIZE;
#else
	  poly_int64 red_zone_size = 0;
#endif
	  poly_int64 stack_boundary = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
	  poly_int64 low_bound, high_bound;

	  if (!known_size_p (size))
	    return 1;

	  /* Compute [low_bound, high_bound], the range of offsets from
	     this base register that is assumed safe to access.  */
	  if (x == frame_pointer_rtx)
	    {
	      if (FRAME_GROWS_DOWNWARD)
		{
		  high_bound = targetm.starting_frame_offset ();
		  low_bound  = high_bound - get_frame_size ();
		}
	      else
		{
		  low_bound  = targetm.starting_frame_offset ();
		  high_bound = low_bound + get_frame_size ();
		}
	    }
	  else if (x == hard_frame_pointer_rtx)
	    {
	      poly_int64 sp_offset
		= get_initial_register_offset (STACK_POINTER_REGNUM,
					       HARD_FRAME_POINTER_REGNUM);
	      poly_int64 ap_offset
		= get_initial_register_offset (ARG_POINTER_REGNUM,
					       HARD_FRAME_POINTER_REGNUM);

	      /* The safe range spans from just beyond the stack pointer
		 (plus any red zone) to the end of the incoming args.  */
#if STACK_GROWS_DOWNWARD
	      low_bound  = sp_offset - red_zone_size - stack_boundary;
	      high_bound = ap_offset
			   + FIRST_PARM_OFFSET (current_function_decl)
#if !ARGS_GROW_DOWNWARD
			   + crtl->args.size
#endif
			   + stack_boundary;
#else
	      high_bound = sp_offset + red_zone_size + stack_boundary;
	      low_bound  = ap_offset
			   + FIRST_PARM_OFFSET (current_function_decl)
#if ARGS_GROW_DOWNWARD
			   - crtl->args.size
#endif
			   - stack_boundary;
#endif
	    }
	  else if (x == stack_pointer_rtx)
	    {
	      poly_int64 ap_offset
		= get_initial_register_offset (ARG_POINTER_REGNUM,
					       STACK_POINTER_REGNUM);

#if STACK_GROWS_DOWNWARD
	      low_bound  = - red_zone_size - stack_boundary;
	      high_bound = ap_offset
			   + FIRST_PARM_OFFSET (current_function_decl)
#if !ARGS_GROW_DOWNWARD
			   + crtl->args.size
#endif
			   + stack_boundary;
#else
	      high_bound = red_zone_size + stack_boundary;
	      low_bound  = ap_offset
			   + FIRST_PARM_OFFSET (current_function_decl)
#if ARGS_GROW_DOWNWARD
			   - crtl->args.size
#endif
			   - stack_boundary;
#endif
	    }
	  else
	    {
	      /* We assume that accesses are safe to at least the
		 next stack boundary.
		 Examples are varargs and __builtin_return_address.  */
#if ARGS_GROW_DOWNWARD
	      high_bound = FIRST_PARM_OFFSET (current_function_decl)
			   + stack_boundary;
	      low_bound  = FIRST_PARM_OFFSET (current_function_decl)
			   - crtl->args.size - stack_boundary;
#else
	      low_bound  = FIRST_PARM_OFFSET (current_function_decl)
			   - stack_boundary;
	      high_bound = FIRST_PARM_OFFSET (current_function_decl)
			   + crtl->args.size + stack_boundary;
#endif
	    }

	  if (known_ge (offset, low_bound)
	      && known_le (offset, high_bound - size))
	    return 0;
	  return 1;
	}
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
				    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
	 - it is the pic register plus a const unspec without offset.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
	  && GET_CODE (XEXP (x, 1)) == CONST
	  && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  && known_eq (offset, 0))
	return 0;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
				     size, mode, unaligned_mems))
	return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      /* Only operand 1 determines the final address.  */
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
				    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      /* The base register (operand 0) carries the address.  */
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
				    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the case above, it can cause a trap.  */
  return 1;
}
68338fd1498Szrj 
68438fd1498Szrj /* Return nonzero if the use of X as an address in a MEM can cause a trap.  */
68538fd1498Szrj 
68638fd1498Szrj int
rtx_addr_can_trap_p(const_rtx x)68738fd1498Szrj rtx_addr_can_trap_p (const_rtx x)
68838fd1498Szrj {
68938fd1498Szrj   return rtx_addr_can_trap_p_1 (x, 0, -1, BLKmode, false);
69038fd1498Szrj }
69138fd1498Szrj 
69238fd1498Szrj /* Return true if X contains a MEM subrtx.  */
69338fd1498Szrj 
69438fd1498Szrj bool
contains_mem_rtx_p(rtx x)69538fd1498Szrj contains_mem_rtx_p (rtx x)
69638fd1498Szrj {
69738fd1498Szrj   subrtx_iterator::array_type array;
69838fd1498Szrj   FOR_EACH_SUBRTX (iter, array, x, ALL)
69938fd1498Szrj     if (MEM_P (*iter))
70038fd1498Szrj       return true;
70138fd1498Szrj 
70238fd1498Szrj   return false;
70338fd1498Szrj }
70438fd1498Szrj 
70538fd1498Szrj /* Return true if X is an address that is known to not be zero.  */
70638fd1498Szrj 
70738fd1498Szrj bool
nonzero_address_p(const_rtx x)70838fd1498Szrj nonzero_address_p (const_rtx x)
70938fd1498Szrj {
71038fd1498Szrj   const enum rtx_code code = GET_CODE (x);
71138fd1498Szrj 
71238fd1498Szrj   switch (code)
71338fd1498Szrj     {
71438fd1498Szrj     case SYMBOL_REF:
71538fd1498Szrj       return flag_delete_null_pointer_checks && !SYMBOL_REF_WEAK (x);
71638fd1498Szrj 
71738fd1498Szrj     case LABEL_REF:
71838fd1498Szrj       return true;
71938fd1498Szrj 
72038fd1498Szrj     case REG:
72138fd1498Szrj       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
72238fd1498Szrj       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
72338fd1498Szrj 	  || x == stack_pointer_rtx
72438fd1498Szrj 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
72538fd1498Szrj 	return true;
72638fd1498Szrj       /* All of the virtual frame registers are stack references.  */
72738fd1498Szrj       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
72838fd1498Szrj 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
72938fd1498Szrj 	return true;
73038fd1498Szrj       return false;
73138fd1498Szrj 
73238fd1498Szrj     case CONST:
73338fd1498Szrj       return nonzero_address_p (XEXP (x, 0));
73438fd1498Szrj 
73538fd1498Szrj     case PLUS:
73638fd1498Szrj       /* Handle PIC references.  */
73738fd1498Szrj       if (XEXP (x, 0) == pic_offset_table_rtx
73838fd1498Szrj 	       && CONSTANT_P (XEXP (x, 1)))
73938fd1498Szrj 	return true;
74038fd1498Szrj       return false;
74138fd1498Szrj 
74238fd1498Szrj     case PRE_MODIFY:
74338fd1498Szrj       /* Similar to the above; allow positive offsets.  Further, since
74438fd1498Szrj 	 auto-inc is only allowed in memories, the register must be a
74538fd1498Szrj 	 pointer.  */
74638fd1498Szrj       if (CONST_INT_P (XEXP (x, 1))
74738fd1498Szrj 	  && INTVAL (XEXP (x, 1)) > 0)
74838fd1498Szrj 	return true;
74938fd1498Szrj       return nonzero_address_p (XEXP (x, 0));
75038fd1498Szrj 
75138fd1498Szrj     case PRE_INC:
75238fd1498Szrj       /* Similarly.  Further, the offset is always positive.  */
75338fd1498Szrj       return true;
75438fd1498Szrj 
75538fd1498Szrj     case PRE_DEC:
75638fd1498Szrj     case POST_DEC:
75738fd1498Szrj     case POST_INC:
75838fd1498Szrj     case POST_MODIFY:
75938fd1498Szrj       return nonzero_address_p (XEXP (x, 0));
76038fd1498Szrj 
76138fd1498Szrj     case LO_SUM:
76238fd1498Szrj       return nonzero_address_p (XEXP (x, 1));
76338fd1498Szrj 
76438fd1498Szrj     default:
76538fd1498Szrj       break;
76638fd1498Szrj     }
76738fd1498Szrj 
76838fd1498Szrj   /* If it isn't one of the case above, might be zero.  */
76938fd1498Szrj   return false;
77038fd1498Szrj }
77138fd1498Szrj 
77238fd1498Szrj /* Return 1 if X refers to a memory location whose address
77338fd1498Szrj    cannot be compared reliably with constant addresses,
77438fd1498Szrj    or if X refers to a BLKmode memory object.
77538fd1498Szrj    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
77638fd1498Szrj    zero, we are slightly more conservative.  */
77738fd1498Szrj 
77838fd1498Szrj bool
rtx_addr_varies_p(const_rtx x,bool for_alias)77938fd1498Szrj rtx_addr_varies_p (const_rtx x, bool for_alias)
78038fd1498Szrj {
78138fd1498Szrj   enum rtx_code code;
78238fd1498Szrj   int i;
78338fd1498Szrj   const char *fmt;
78438fd1498Szrj 
78538fd1498Szrj   if (x == 0)
78638fd1498Szrj     return 0;
78738fd1498Szrj 
78838fd1498Szrj   code = GET_CODE (x);
78938fd1498Szrj   if (code == MEM)
79038fd1498Szrj     return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
79138fd1498Szrj 
79238fd1498Szrj   fmt = GET_RTX_FORMAT (code);
79338fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
79438fd1498Szrj     if (fmt[i] == 'e')
79538fd1498Szrj       {
79638fd1498Szrj 	if (rtx_addr_varies_p (XEXP (x, i), for_alias))
79738fd1498Szrj 	  return 1;
79838fd1498Szrj       }
79938fd1498Szrj     else if (fmt[i] == 'E')
80038fd1498Szrj       {
80138fd1498Szrj 	int j;
80238fd1498Szrj 	for (j = 0; j < XVECLEN (x, i); j++)
80338fd1498Szrj 	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
80438fd1498Szrj 	    return 1;
80538fd1498Szrj       }
80638fd1498Szrj   return 0;
80738fd1498Szrj }
80838fd1498Szrj 
80938fd1498Szrj /* Return the CALL in X if there is one.  */
81038fd1498Szrj 
81138fd1498Szrj rtx
get_call_rtx_from(rtx x)81238fd1498Szrj get_call_rtx_from (rtx x)
81338fd1498Szrj {
81438fd1498Szrj   if (INSN_P (x))
81538fd1498Szrj     x = PATTERN (x);
81638fd1498Szrj   if (GET_CODE (x) == PARALLEL)
81738fd1498Szrj     x = XVECEXP (x, 0, 0);
81838fd1498Szrj   if (GET_CODE (x) == SET)
81938fd1498Szrj     x = SET_SRC (x);
82038fd1498Szrj   if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
82138fd1498Szrj     return x;
82238fd1498Szrj   return NULL_RTX;
82338fd1498Szrj }
82438fd1498Szrj 
82538fd1498Szrj /* Return the value of the integer term in X, if one is apparent;
82638fd1498Szrj    otherwise return 0.
82738fd1498Szrj    Only obvious integer terms are detected.
82838fd1498Szrj    This is used in cse.c with the `related_value' field.  */
82938fd1498Szrj 
83038fd1498Szrj HOST_WIDE_INT
get_integer_term(const_rtx x)83138fd1498Szrj get_integer_term (const_rtx x)
83238fd1498Szrj {
83338fd1498Szrj   if (GET_CODE (x) == CONST)
83438fd1498Szrj     x = XEXP (x, 0);
83538fd1498Szrj 
83638fd1498Szrj   if (GET_CODE (x) == MINUS
83738fd1498Szrj       && CONST_INT_P (XEXP (x, 1)))
83838fd1498Szrj     return - INTVAL (XEXP (x, 1));
83938fd1498Szrj   if (GET_CODE (x) == PLUS
84038fd1498Szrj       && CONST_INT_P (XEXP (x, 1)))
84138fd1498Szrj     return INTVAL (XEXP (x, 1));
84238fd1498Szrj   return 0;
84338fd1498Szrj }
84438fd1498Szrj 
84538fd1498Szrj /* If X is a constant, return the value sans apparent integer term;
84638fd1498Szrj    otherwise return 0.
84738fd1498Szrj    Only obvious integer terms are detected.  */
84838fd1498Szrj 
84938fd1498Szrj rtx
get_related_value(const_rtx x)85038fd1498Szrj get_related_value (const_rtx x)
85138fd1498Szrj {
85238fd1498Szrj   if (GET_CODE (x) != CONST)
85338fd1498Szrj     return 0;
85438fd1498Szrj   x = XEXP (x, 0);
85538fd1498Szrj   if (GET_CODE (x) == PLUS
85638fd1498Szrj       && CONST_INT_P (XEXP (x, 1)))
85738fd1498Szrj     return XEXP (x, 0);
85838fd1498Szrj   else if (GET_CODE (x) == MINUS
85938fd1498Szrj 	   && CONST_INT_P (XEXP (x, 1)))
86038fd1498Szrj     return XEXP (x, 0);
86138fd1498Szrj   return 0;
86238fd1498Szrj }
86338fd1498Szrj 
86438fd1498Szrj /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
86538fd1498Szrj    to somewhere in the same object or object_block as SYMBOL.  */
86638fd1498Szrj 
86738fd1498Szrj bool
offset_within_block_p(const_rtx symbol,HOST_WIDE_INT offset)86838fd1498Szrj offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
86938fd1498Szrj {
87038fd1498Szrj   tree decl;
87138fd1498Szrj 
87238fd1498Szrj   if (GET_CODE (symbol) != SYMBOL_REF)
87338fd1498Szrj     return false;
87438fd1498Szrj 
87538fd1498Szrj   if (offset == 0)
87638fd1498Szrj     return true;
87738fd1498Szrj 
87838fd1498Szrj   if (offset > 0)
87938fd1498Szrj     {
88038fd1498Szrj       if (CONSTANT_POOL_ADDRESS_P (symbol)
88138fd1498Szrj 	  && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
88238fd1498Szrj 	return true;
88338fd1498Szrj 
88438fd1498Szrj       decl = SYMBOL_REF_DECL (symbol);
88538fd1498Szrj       if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
88638fd1498Szrj 	return true;
88738fd1498Szrj     }
88838fd1498Szrj 
88938fd1498Szrj   if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
89038fd1498Szrj       && SYMBOL_REF_BLOCK (symbol)
89138fd1498Szrj       && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
89238fd1498Szrj       && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
89338fd1498Szrj 	  < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
89438fd1498Szrj     return true;
89538fd1498Szrj 
89638fd1498Szrj   return false;
89738fd1498Szrj }
89838fd1498Szrj 
89938fd1498Szrj /* Split X into a base and a constant offset, storing them in *BASE_OUT
90038fd1498Szrj    and *OFFSET_OUT respectively.  */
90138fd1498Szrj 
90238fd1498Szrj void
split_const(rtx x,rtx * base_out,rtx * offset_out)90338fd1498Szrj split_const (rtx x, rtx *base_out, rtx *offset_out)
90438fd1498Szrj {
90538fd1498Szrj   if (GET_CODE (x) == CONST)
90638fd1498Szrj     {
90738fd1498Szrj       x = XEXP (x, 0);
90838fd1498Szrj       if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
90938fd1498Szrj 	{
91038fd1498Szrj 	  *base_out = XEXP (x, 0);
91138fd1498Szrj 	  *offset_out = XEXP (x, 1);
91238fd1498Szrj 	  return;
91338fd1498Szrj 	}
91438fd1498Szrj     }
91538fd1498Szrj   *base_out = x;
91638fd1498Szrj   *offset_out = const0_rtx;
91738fd1498Szrj }
91838fd1498Szrj 
91938fd1498Szrj /* Express integer value X as some value Y plus a polynomial offset,
92038fd1498Szrj    where Y is either const0_rtx, X or something within X (as opposed
92138fd1498Szrj    to a new rtx).  Return the Y and store the offset in *OFFSET_OUT.  */
92238fd1498Szrj 
92338fd1498Szrj rtx
strip_offset(rtx x,poly_int64_pod * offset_out)92438fd1498Szrj strip_offset (rtx x, poly_int64_pod *offset_out)
92538fd1498Szrj {
92638fd1498Szrj   rtx base = const0_rtx;
92738fd1498Szrj   rtx test = x;
92838fd1498Szrj   if (GET_CODE (test) == CONST)
92938fd1498Szrj     test = XEXP (test, 0);
93038fd1498Szrj   if (GET_CODE (test) == PLUS)
93138fd1498Szrj     {
93238fd1498Szrj       base = XEXP (test, 0);
93338fd1498Szrj       test = XEXP (test, 1);
93438fd1498Szrj     }
93538fd1498Szrj   if (poly_int_rtx_p (test, offset_out))
93638fd1498Szrj     return base;
93738fd1498Szrj   *offset_out = 0;
93838fd1498Szrj   return x;
93938fd1498Szrj }
94038fd1498Szrj 
94138fd1498Szrj /* Return the argument size in REG_ARGS_SIZE note X.  */
94238fd1498Szrj 
94338fd1498Szrj poly_int64
get_args_size(const_rtx x)94438fd1498Szrj get_args_size (const_rtx x)
94538fd1498Szrj {
94638fd1498Szrj   gcc_checking_assert (REG_NOTE_KIND (x) == REG_ARGS_SIZE);
94738fd1498Szrj   return rtx_to_poly_int64 (XEXP (x, 0));
94838fd1498Szrj }
94938fd1498Szrj 
95038fd1498Szrj /* Return the number of places FIND appears within X.  If COUNT_DEST is
95138fd1498Szrj    zero, we do not count occurrences inside the destination of a SET.  */
95238fd1498Szrj 
95338fd1498Szrj int
count_occurrences(const_rtx x,const_rtx find,int count_dest)95438fd1498Szrj count_occurrences (const_rtx x, const_rtx find, int count_dest)
95538fd1498Szrj {
95638fd1498Szrj   int i, j;
95738fd1498Szrj   enum rtx_code code;
95838fd1498Szrj   const char *format_ptr;
95938fd1498Szrj   int count;
96038fd1498Szrj 
96138fd1498Szrj   if (x == find)
96238fd1498Szrj     return 1;
96338fd1498Szrj 
96438fd1498Szrj   code = GET_CODE (x);
96538fd1498Szrj 
96638fd1498Szrj   switch (code)
96738fd1498Szrj     {
96838fd1498Szrj     case REG:
96938fd1498Szrj     CASE_CONST_ANY:
97038fd1498Szrj     case SYMBOL_REF:
97138fd1498Szrj     case CODE_LABEL:
97238fd1498Szrj     case PC:
97338fd1498Szrj     case CC0:
97438fd1498Szrj       return 0;
97538fd1498Szrj 
97638fd1498Szrj     case EXPR_LIST:
97738fd1498Szrj       count = count_occurrences (XEXP (x, 0), find, count_dest);
97838fd1498Szrj       if (XEXP (x, 1))
97938fd1498Szrj 	count += count_occurrences (XEXP (x, 1), find, count_dest);
98038fd1498Szrj       return count;
98138fd1498Szrj 
98238fd1498Szrj     case MEM:
98338fd1498Szrj       if (MEM_P (find) && rtx_equal_p (x, find))
98438fd1498Szrj 	return 1;
98538fd1498Szrj       break;
98638fd1498Szrj 
98738fd1498Szrj     case SET:
98838fd1498Szrj       if (SET_DEST (x) == find && ! count_dest)
98938fd1498Szrj 	return count_occurrences (SET_SRC (x), find, count_dest);
99038fd1498Szrj       break;
99138fd1498Szrj 
99238fd1498Szrj     default:
99338fd1498Szrj       break;
99438fd1498Szrj     }
99538fd1498Szrj 
99638fd1498Szrj   format_ptr = GET_RTX_FORMAT (code);
99738fd1498Szrj   count = 0;
99838fd1498Szrj 
99938fd1498Szrj   for (i = 0; i < GET_RTX_LENGTH (code); i++)
100038fd1498Szrj     {
100138fd1498Szrj       switch (*format_ptr++)
100238fd1498Szrj 	{
100338fd1498Szrj 	case 'e':
100438fd1498Szrj 	  count += count_occurrences (XEXP (x, i), find, count_dest);
100538fd1498Szrj 	  break;
100638fd1498Szrj 
100738fd1498Szrj 	case 'E':
100838fd1498Szrj 	  for (j = 0; j < XVECLEN (x, i); j++)
100938fd1498Szrj 	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
101038fd1498Szrj 	  break;
101138fd1498Szrj 	}
101238fd1498Szrj     }
101338fd1498Szrj   return count;
101438fd1498Szrj }
101538fd1498Szrj 
101638fd1498Szrj 
101738fd1498Szrj /* Return TRUE if OP is a register or subreg of a register that
101838fd1498Szrj    holds an unsigned quantity.  Otherwise, return FALSE.  */
101938fd1498Szrj 
102038fd1498Szrj bool
unsigned_reg_p(rtx op)102138fd1498Szrj unsigned_reg_p (rtx op)
102238fd1498Szrj {
102338fd1498Szrj   if (REG_P (op)
102438fd1498Szrj       && REG_EXPR (op)
102538fd1498Szrj       && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
102638fd1498Szrj     return true;
102738fd1498Szrj 
102838fd1498Szrj   if (GET_CODE (op) == SUBREG
102938fd1498Szrj       && SUBREG_PROMOTED_SIGN (op))
103038fd1498Szrj     return true;
103138fd1498Szrj 
103238fd1498Szrj   return false;
103338fd1498Szrj }
103438fd1498Szrj 
103538fd1498Szrj 
103638fd1498Szrj /* Nonzero if register REG appears somewhere within IN.
103738fd1498Szrj    Also works if REG is not a register; in this case it checks
103838fd1498Szrj    for a subexpression of IN that is Lisp "equal" to REG.  */
103938fd1498Szrj 
104038fd1498Szrj int
reg_mentioned_p(const_rtx reg,const_rtx in)104138fd1498Szrj reg_mentioned_p (const_rtx reg, const_rtx in)
104238fd1498Szrj {
104338fd1498Szrj   const char *fmt;
104438fd1498Szrj   int i;
104538fd1498Szrj   enum rtx_code code;
104638fd1498Szrj 
104738fd1498Szrj   if (in == 0)
104838fd1498Szrj     return 0;
104938fd1498Szrj 
105038fd1498Szrj   if (reg == in)
105138fd1498Szrj     return 1;
105238fd1498Szrj 
105338fd1498Szrj   if (GET_CODE (in) == LABEL_REF)
105438fd1498Szrj     return reg == label_ref_label (in);
105538fd1498Szrj 
105638fd1498Szrj   code = GET_CODE (in);
105738fd1498Szrj 
105838fd1498Szrj   switch (code)
105938fd1498Szrj     {
106038fd1498Szrj       /* Compare registers by number.  */
106138fd1498Szrj     case REG:
106238fd1498Szrj       return REG_P (reg) && REGNO (in) == REGNO (reg);
106338fd1498Szrj 
106438fd1498Szrj       /* These codes have no constituent expressions
106538fd1498Szrj 	 and are unique.  */
106638fd1498Szrj     case SCRATCH:
106738fd1498Szrj     case CC0:
106838fd1498Szrj     case PC:
106938fd1498Szrj       return 0;
107038fd1498Szrj 
107138fd1498Szrj     CASE_CONST_ANY:
107238fd1498Szrj       /* These are kept unique for a given value.  */
107338fd1498Szrj       return 0;
107438fd1498Szrj 
107538fd1498Szrj     default:
107638fd1498Szrj       break;
107738fd1498Szrj     }
107838fd1498Szrj 
107938fd1498Szrj   if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
108038fd1498Szrj     return 1;
108138fd1498Szrj 
108238fd1498Szrj   fmt = GET_RTX_FORMAT (code);
108338fd1498Szrj 
108438fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
108538fd1498Szrj     {
108638fd1498Szrj       if (fmt[i] == 'E')
108738fd1498Szrj 	{
108838fd1498Szrj 	  int j;
108938fd1498Szrj 	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
109038fd1498Szrj 	    if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
109138fd1498Szrj 	      return 1;
109238fd1498Szrj 	}
109338fd1498Szrj       else if (fmt[i] == 'e'
109438fd1498Szrj 	       && reg_mentioned_p (reg, XEXP (in, i)))
109538fd1498Szrj 	return 1;
109638fd1498Szrj     }
109738fd1498Szrj   return 0;
109838fd1498Szrj }
109938fd1498Szrj 
110038fd1498Szrj /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
110138fd1498Szrj    no CODE_LABEL insn.  */
110238fd1498Szrj 
110338fd1498Szrj int
no_labels_between_p(const rtx_insn * beg,const rtx_insn * end)110438fd1498Szrj no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
110538fd1498Szrj {
110638fd1498Szrj   rtx_insn *p;
110738fd1498Szrj   if (beg == end)
110838fd1498Szrj     return 0;
110938fd1498Szrj   for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
111038fd1498Szrj     if (LABEL_P (p))
111138fd1498Szrj       return 0;
111238fd1498Szrj   return 1;
111338fd1498Szrj }
111438fd1498Szrj 
111538fd1498Szrj /* Nonzero if register REG is used in an insn between
111638fd1498Szrj    FROM_INSN and TO_INSN (exclusive of those two).  */
111738fd1498Szrj 
111838fd1498Szrj int
reg_used_between_p(const_rtx reg,const rtx_insn * from_insn,const rtx_insn * to_insn)111938fd1498Szrj reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
112038fd1498Szrj 		    const rtx_insn *to_insn)
112138fd1498Szrj {
112238fd1498Szrj   rtx_insn *insn;
112338fd1498Szrj 
112438fd1498Szrj   if (from_insn == to_insn)
112538fd1498Szrj     return 0;
112638fd1498Szrj 
112738fd1498Szrj   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
112838fd1498Szrj     if (NONDEBUG_INSN_P (insn)
112938fd1498Szrj 	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
113038fd1498Szrj 	   || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
113138fd1498Szrj       return 1;
113238fd1498Szrj   return 0;
113338fd1498Szrj }
113438fd1498Szrj 
113538fd1498Szrj /* Nonzero if the old value of X, a register, is referenced in BODY.  If X
113638fd1498Szrj    is entirely replaced by a new value and the only use is as a SET_DEST,
113738fd1498Szrj    we do not consider it a reference.  */
113838fd1498Szrj 
113938fd1498Szrj int
reg_referenced_p(const_rtx x,const_rtx body)114038fd1498Szrj reg_referenced_p (const_rtx x, const_rtx body)
114138fd1498Szrj {
114238fd1498Szrj   int i;
114338fd1498Szrj 
114438fd1498Szrj   switch (GET_CODE (body))
114538fd1498Szrj     {
114638fd1498Szrj     case SET:
114738fd1498Szrj       if (reg_overlap_mentioned_p (x, SET_SRC (body)))
114838fd1498Szrj 	return 1;
114938fd1498Szrj 
115038fd1498Szrj       /* If the destination is anything other than CC0, PC, a REG or a SUBREG
115138fd1498Szrj 	 of a REG that occupies all of the REG, the insn references X if
115238fd1498Szrj 	 it is mentioned in the destination.  */
115338fd1498Szrj       if (GET_CODE (SET_DEST (body)) != CC0
115438fd1498Szrj 	  && GET_CODE (SET_DEST (body)) != PC
115538fd1498Szrj 	  && !REG_P (SET_DEST (body))
115638fd1498Szrj 	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
115738fd1498Szrj 		&& REG_P (SUBREG_REG (SET_DEST (body)))
115838fd1498Szrj 		&& !read_modify_subreg_p (SET_DEST (body)))
115938fd1498Szrj 	  && reg_overlap_mentioned_p (x, SET_DEST (body)))
116038fd1498Szrj 	return 1;
116138fd1498Szrj       return 0;
116238fd1498Szrj 
116338fd1498Szrj     case ASM_OPERANDS:
116438fd1498Szrj       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
116538fd1498Szrj 	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
116638fd1498Szrj 	  return 1;
116738fd1498Szrj       return 0;
116838fd1498Szrj 
116938fd1498Szrj     case CALL:
117038fd1498Szrj     case USE:
117138fd1498Szrj     case IF_THEN_ELSE:
117238fd1498Szrj       return reg_overlap_mentioned_p (x, body);
117338fd1498Szrj 
117438fd1498Szrj     case TRAP_IF:
117538fd1498Szrj       return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
117638fd1498Szrj 
117738fd1498Szrj     case PREFETCH:
117838fd1498Szrj       return reg_overlap_mentioned_p (x, XEXP (body, 0));
117938fd1498Szrj 
118038fd1498Szrj     case UNSPEC:
118138fd1498Szrj     case UNSPEC_VOLATILE:
118238fd1498Szrj       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
118338fd1498Szrj 	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
118438fd1498Szrj 	  return 1;
118538fd1498Szrj       return 0;
118638fd1498Szrj 
118738fd1498Szrj     case PARALLEL:
118838fd1498Szrj       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
118938fd1498Szrj 	if (reg_referenced_p (x, XVECEXP (body, 0, i)))
119038fd1498Szrj 	  return 1;
119138fd1498Szrj       return 0;
119238fd1498Szrj 
119338fd1498Szrj     case CLOBBER:
119438fd1498Szrj       if (MEM_P (XEXP (body, 0)))
119538fd1498Szrj 	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
119638fd1498Szrj 	  return 1;
119738fd1498Szrj       return 0;
119838fd1498Szrj 
119938fd1498Szrj     case COND_EXEC:
120038fd1498Szrj       if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
120138fd1498Szrj 	return 1;
120238fd1498Szrj       return reg_referenced_p (x, COND_EXEC_CODE (body));
120338fd1498Szrj 
120438fd1498Szrj     default:
120538fd1498Szrj       return 0;
120638fd1498Szrj     }
120738fd1498Szrj }
120838fd1498Szrj 
120938fd1498Szrj /* Nonzero if register REG is set or clobbered in an insn between
121038fd1498Szrj    FROM_INSN and TO_INSN (exclusive of those two).  */
121138fd1498Szrj 
121238fd1498Szrj int
reg_set_between_p(const_rtx reg,const rtx_insn * from_insn,const rtx_insn * to_insn)121338fd1498Szrj reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
121438fd1498Szrj 		   const rtx_insn *to_insn)
121538fd1498Szrj {
121638fd1498Szrj   const rtx_insn *insn;
121738fd1498Szrj 
121838fd1498Szrj   if (from_insn == to_insn)
121938fd1498Szrj     return 0;
122038fd1498Szrj 
122138fd1498Szrj   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
122238fd1498Szrj     if (INSN_P (insn) && reg_set_p (reg, insn))
122338fd1498Szrj       return 1;
122438fd1498Szrj   return 0;
122538fd1498Szrj }
122638fd1498Szrj 
/* Return true if REG is set or clobbered inside INSN.  REG may also be
   a MEM, in which case calls are conservatively treated as clobbering
   it.  */

int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* After delay slot handling, call and branch insns might be in a
     sequence.  Check all the elements there.  */
  if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
	if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
	  return true;

      return false;
    }

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  A call clobbers
     REG when REG is a call-invalidated hard register, when REG is a MEM
     (conservative), or when the call's fusage list explicitly clobbers
     it.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
	  || (CALL_P (insn)
	      && ((REG_P (reg)
		   && REGNO (reg) < FIRST_PSEUDO_REGISTER
		   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
					       GET_MODE (reg), REGNO (reg)))
		  || MEM_P (reg)
		  || find_reg_fusage (insn, CLOBBER, reg)))))
    return true;

  /* There are no REG_INC notes for SP autoinc.  */
  if (reg == stack_pointer_rtx && INSN_P (insn))
    {
      subrtx_var_iterator::array_type array;
      FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), NONCONST)
	{
	  rtx mem = *iter;
	  if (mem
	      && MEM_P (mem)
	      && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
	    {
	      /* An auto-inc/dec address whose base is SP modifies SP.  */
	      if (XEXP (XEXP (mem, 0), 0) == stack_pointer_rtx)
		return true;
	      /* Either way, do not descend into this MEM's operands.  */
	      iter.skip_subrtxes ();
	    }
	}
    }

  /* Finally, look for an explicit SET or CLOBBER of REG.  */
  return set_of (reg, insn) != NULL_RTX;
}
127638fd1498Szrj 
127738fd1498Szrj /* Similar to reg_set_between_p, but check all registers in X.  Return 0
127838fd1498Szrj    only if none of them are modified between START and END.  Return 1 if
127938fd1498Szrj    X contains a MEM; this routine does use memory aliasing.  */
128038fd1498Szrj 
128138fd1498Szrj int
modified_between_p(const_rtx x,const rtx_insn * start,const rtx_insn * end)128238fd1498Szrj modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
128338fd1498Szrj {
128438fd1498Szrj   const enum rtx_code code = GET_CODE (x);
128538fd1498Szrj   const char *fmt;
128638fd1498Szrj   int i, j;
128738fd1498Szrj   rtx_insn *insn;
128838fd1498Szrj 
128938fd1498Szrj   if (start == end)
129038fd1498Szrj     return 0;
129138fd1498Szrj 
129238fd1498Szrj   switch (code)
129338fd1498Szrj     {
129438fd1498Szrj     CASE_CONST_ANY:
129538fd1498Szrj     case CONST:
129638fd1498Szrj     case SYMBOL_REF:
129738fd1498Szrj     case LABEL_REF:
129838fd1498Szrj       return 0;
129938fd1498Szrj 
130038fd1498Szrj     case PC:
130138fd1498Szrj     case CC0:
130238fd1498Szrj       return 1;
130338fd1498Szrj 
130438fd1498Szrj     case MEM:
130538fd1498Szrj       if (modified_between_p (XEXP (x, 0), start, end))
130638fd1498Szrj 	return 1;
130738fd1498Szrj       if (MEM_READONLY_P (x))
130838fd1498Szrj 	return 0;
130938fd1498Szrj       for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
131038fd1498Szrj 	if (memory_modified_in_insn_p (x, insn))
131138fd1498Szrj 	  return 1;
131238fd1498Szrj       return 0;
131338fd1498Szrj 
131438fd1498Szrj     case REG:
131538fd1498Szrj       return reg_set_between_p (x, start, end);
131638fd1498Szrj 
131738fd1498Szrj     default:
131838fd1498Szrj       break;
131938fd1498Szrj     }
132038fd1498Szrj 
132138fd1498Szrj   fmt = GET_RTX_FORMAT (code);
132238fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
132338fd1498Szrj     {
132438fd1498Szrj       if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
132538fd1498Szrj 	return 1;
132638fd1498Szrj 
132738fd1498Szrj       else if (fmt[i] == 'E')
132838fd1498Szrj 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
132938fd1498Szrj 	  if (modified_between_p (XVECEXP (x, i, j), start, end))
133038fd1498Szrj 	    return 1;
133138fd1498Szrj     }
133238fd1498Szrj 
133338fd1498Szrj   return 0;
133438fd1498Szrj }
133538fd1498Szrj 
133638fd1498Szrj /* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
133738fd1498Szrj    of them are modified in INSN.  Return 1 if X contains a MEM; this routine
133838fd1498Szrj    does use memory aliasing.  */
133938fd1498Szrj 
134038fd1498Szrj int
modified_in_p(const_rtx x,const_rtx insn)134138fd1498Szrj modified_in_p (const_rtx x, const_rtx insn)
134238fd1498Szrj {
134338fd1498Szrj   const enum rtx_code code = GET_CODE (x);
134438fd1498Szrj   const char *fmt;
134538fd1498Szrj   int i, j;
134638fd1498Szrj 
134738fd1498Szrj   switch (code)
134838fd1498Szrj     {
134938fd1498Szrj     CASE_CONST_ANY:
135038fd1498Szrj     case CONST:
135138fd1498Szrj     case SYMBOL_REF:
135238fd1498Szrj     case LABEL_REF:
135338fd1498Szrj       return 0;
135438fd1498Szrj 
135538fd1498Szrj     case PC:
135638fd1498Szrj     case CC0:
135738fd1498Szrj       return 1;
135838fd1498Szrj 
135938fd1498Szrj     case MEM:
136038fd1498Szrj       if (modified_in_p (XEXP (x, 0), insn))
136138fd1498Szrj 	return 1;
136238fd1498Szrj       if (MEM_READONLY_P (x))
136338fd1498Szrj 	return 0;
136438fd1498Szrj       if (memory_modified_in_insn_p (x, insn))
136538fd1498Szrj 	return 1;
136638fd1498Szrj       return 0;
136738fd1498Szrj 
136838fd1498Szrj     case REG:
136938fd1498Szrj       return reg_set_p (x, insn);
137038fd1498Szrj 
137138fd1498Szrj     default:
137238fd1498Szrj       break;
137338fd1498Szrj     }
137438fd1498Szrj 
137538fd1498Szrj   fmt = GET_RTX_FORMAT (code);
137638fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
137738fd1498Szrj     {
137838fd1498Szrj       if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
137938fd1498Szrj 	return 1;
138038fd1498Szrj 
138138fd1498Szrj       else if (fmt[i] == 'E')
138238fd1498Szrj 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
138338fd1498Szrj 	  if (modified_in_p (XVECEXP (x, i, j), insn))
138438fd1498Szrj 	    return 1;
138538fd1498Szrj     }
138638fd1498Szrj 
138738fd1498Szrj   return 0;
138838fd1498Szrj }
138938fd1498Szrj 
139038fd1498Szrj /* Return true if X is a SUBREG and if storing a value to X would
139138fd1498Szrj    preserve some of its SUBREG_REG.  For example, on a normal 32-bit
139238fd1498Szrj    target, using a SUBREG to store to one half of a DImode REG would
139338fd1498Szrj    preserve the other half.  */
139438fd1498Szrj 
139538fd1498Szrj bool
read_modify_subreg_p(const_rtx x)139638fd1498Szrj read_modify_subreg_p (const_rtx x)
139738fd1498Szrj {
139838fd1498Szrj   if (GET_CODE (x) != SUBREG)
139938fd1498Szrj     return false;
140038fd1498Szrj   poly_uint64 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
140138fd1498Szrj   poly_uint64 osize = GET_MODE_SIZE (GET_MODE (x));
140238fd1498Szrj   poly_uint64 regsize = REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
140338fd1498Szrj   /* The inner and outer modes of a subreg must be ordered, so that we
140438fd1498Szrj      can tell whether they're paradoxical or partial.  */
140538fd1498Szrj   gcc_checking_assert (ordered_p (isize, osize));
140638fd1498Szrj   return (maybe_gt (isize, osize) && maybe_gt (isize, regsize));
140738fd1498Szrj }
140838fd1498Szrj 
/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;	/* SET/CLOBBER rtx recorded once a match is seen.  */
    const_rtx pat;	/* The rtx we are looking for a store to.  */
  };
141538fd1498Szrj 
141638fd1498Szrj static void
set_of_1(rtx x,const_rtx pat,void * data1)141738fd1498Szrj set_of_1 (rtx x, const_rtx pat, void *data1)
141838fd1498Szrj {
141938fd1498Szrj   struct set_of_data *const data = (struct set_of_data *) (data1);
142038fd1498Szrj   if (rtx_equal_p (x, data->pat)
142138fd1498Szrj       || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
142238fd1498Szrj     data->found = pat;
142338fd1498Szrj }
142438fd1498Szrj 
142538fd1498Szrj /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
142638fd1498Szrj    (either directly or via STRICT_LOW_PART and similar modifiers).  */
142738fd1498Szrj const_rtx
set_of(const_rtx pat,const_rtx insn)142838fd1498Szrj set_of (const_rtx pat, const_rtx insn)
142938fd1498Szrj {
143038fd1498Szrj   struct set_of_data data;
143138fd1498Szrj   data.found = NULL_RTX;
143238fd1498Szrj   data.pat = pat;
143338fd1498Szrj   note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
143438fd1498Szrj   return data.found;
143538fd1498Szrj }
143638fd1498Szrj 
143738fd1498Szrj /* Add all hard register in X to *PSET.  */
143838fd1498Szrj void
find_all_hard_regs(const_rtx x,HARD_REG_SET * pset)143938fd1498Szrj find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
144038fd1498Szrj {
144138fd1498Szrj   subrtx_iterator::array_type array;
144238fd1498Szrj   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
144338fd1498Szrj     {
144438fd1498Szrj       const_rtx x = *iter;
144538fd1498Szrj       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
144638fd1498Szrj 	add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
144738fd1498Szrj     }
144838fd1498Szrj }
144938fd1498Szrj 
145038fd1498Szrj /* This function, called through note_stores, collects sets and
145138fd1498Szrj    clobbers of hard registers in a HARD_REG_SET, which is pointed to
145238fd1498Szrj    by DATA.  */
145338fd1498Szrj void
record_hard_reg_sets(rtx x,const_rtx pat ATTRIBUTE_UNUSED,void * data)145438fd1498Szrj record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
145538fd1498Szrj {
145638fd1498Szrj   HARD_REG_SET *pset = (HARD_REG_SET *)data;
145738fd1498Szrj   if (REG_P (x) && HARD_REGISTER_P (x))
145838fd1498Szrj     add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
145938fd1498Szrj }
146038fd1498Szrj 
146138fd1498Szrj /* Examine INSN, and compute the set of hard registers written by it.
146238fd1498Szrj    Store it in *PSET.  Should only be called after reload.  */
146338fd1498Szrj void
find_all_hard_reg_sets(const rtx_insn * insn,HARD_REG_SET * pset,bool implicit)146438fd1498Szrj find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
146538fd1498Szrj {
146638fd1498Szrj   rtx link;
146738fd1498Szrj 
146838fd1498Szrj   CLEAR_HARD_REG_SET (*pset);
146938fd1498Szrj   note_stores (PATTERN (insn), record_hard_reg_sets, pset);
147038fd1498Szrj   if (CALL_P (insn))
147138fd1498Szrj     {
147238fd1498Szrj       if (implicit)
147338fd1498Szrj 	IOR_HARD_REG_SET (*pset, call_used_reg_set);
147438fd1498Szrj 
147538fd1498Szrj       for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
147638fd1498Szrj 	record_hard_reg_sets (XEXP (link, 0), NULL, pset);
147738fd1498Szrj     }
147838fd1498Szrj   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
147938fd1498Szrj     if (REG_NOTE_KIND (link) == REG_INC)
148038fd1498Szrj       record_hard_reg_sets (XEXP (link, 0), NULL, pset);
148138fd1498Szrj }
148238fd1498Szrj 
148338fd1498Szrj /* Like record_hard_reg_sets, but called through note_uses.  */
148438fd1498Szrj void
record_hard_reg_uses(rtx * px,void * data)148538fd1498Szrj record_hard_reg_uses (rtx *px, void *data)
148638fd1498Szrj {
148738fd1498Szrj   find_all_hard_regs (*px, (HARD_REG_SET *) data);
148838fd1498Szrj }
148938fd1498Szrj 
149038fd1498Szrj /* Given an INSN, return a SET expression if this insn has only a single SET.
149138fd1498Szrj    It may also have CLOBBERs, USEs, or SET whose output
149238fd1498Szrj    will not be used, which we ignore.  */
149338fd1498Szrj 
rtx
single_set_2 (const rtx_insn *insn, const_rtx pat)
{
  rtx set = NULL;
  /* Nonzero while SET is known to be the only live SET seen so far;
     zero means SET was accepted provisionally and must be re-checked
     against REG_UNUSED if another SET turns up.  */
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx sub = XVECEXP (pat, 0, i);
	  switch (GET_CODE (sub))
	    {
	    case USE:
	    case CLOBBER:
	      /* USEs and CLOBBERs never disqualify a single set.  */
	      break;

	    case SET:
	      /* We can consider insns having multiple sets, where all
		 but one are dead as single set insns.  In common case
		 only single set is present in the pattern so we want
		 to avoid checking for REG_UNUSED notes unless necessary.

		 When we reach set first time, we just expect this is
		 the single set we are looking for and only when more
		 sets are found in the insn, we check them.  */
	      if (!set_verified)
		{
		  /* The previously recorded SET is dead if its
		     destination carries REG_UNUSED and the SET has no
		     side effects; drop it and keep looking.  */
		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
		      && !side_effects_p (set))
		    set = NULL;
		  else
		    set_verified = 1;
		}
	      if (!set)
		set = sub, set_verified = 0;
	      /* Two live SETs: this is not a single-set insn.  */
	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
		       || side_effects_p (sub))
		return NULL_RTX;
	      break;

	    default:
	      /* Any other element disqualifies the pattern.  */
	      return NULL_RTX;
	    }
	}
    }
  return set;
}
154338fd1498Szrj 
154438fd1498Szrj /* Given an INSN, return nonzero if it has more than one SET, else return
154538fd1498Szrj    zero.  */
154638fd1498Szrj 
154738fd1498Szrj int
multiple_sets(const_rtx insn)154838fd1498Szrj multiple_sets (const_rtx insn)
154938fd1498Szrj {
155038fd1498Szrj   int found;
155138fd1498Szrj   int i;
155238fd1498Szrj 
155338fd1498Szrj   /* INSN must be an insn.  */
155438fd1498Szrj   if (! INSN_P (insn))
155538fd1498Szrj     return 0;
155638fd1498Szrj 
155738fd1498Szrj   /* Only a PARALLEL can have multiple SETs.  */
155838fd1498Szrj   if (GET_CODE (PATTERN (insn)) == PARALLEL)
155938fd1498Szrj     {
156038fd1498Szrj       for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
156138fd1498Szrj 	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
156238fd1498Szrj 	  {
156338fd1498Szrj 	    /* If we have already found a SET, then return now.  */
156438fd1498Szrj 	    if (found)
156538fd1498Szrj 	      return 1;
156638fd1498Szrj 	    else
156738fd1498Szrj 	      found = 1;
156838fd1498Szrj 	  }
156938fd1498Szrj     }
157038fd1498Szrj 
157138fd1498Szrj   /* Either zero or one SET.  */
157238fd1498Szrj   return 0;
157338fd1498Szrj }
157438fd1498Szrj 
157538fd1498Szrj /* Return nonzero if the destination of SET equals the source
157638fd1498Szrj    and there are no side effects.  */
157738fd1498Szrj 
int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  /* (set (pc) (pc)) is a no-op jump.  */
  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  /* Identical MEMs, provided the access has no side effects (such as
     volatility or auto-modified addresses).  */
  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  /* A ZERO_EXTRACT at bit position 0 that writes back bits taken from
     the same object changes nothing; only valid with little-endian
     bit numbering.  */
  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
	   && !BITS_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
	   && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  /* SUBREG-to-SUBREG copy: compare the inner objects, provided both
     subregs select the same piece.  */
  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (maybe_ne (SUBREG_BYTE (src), SUBREG_BYTE (dst)))
	return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      /* First selected element and the byte offset it corresponds to.  */
      int c0 = INTVAL (XVECEXP (par, 0, 0));
      HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      /* The selection must be the consecutive run C0, C0+1, ...  */
      for (i = 1; i < XVECLEN (par, 0); i++)
	if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
	  return 0;
      /* No-op when the selected slice occupies exactly DST's registers.  */
      return
	simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
			       offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  /* Plain register copied onto itself.  */
  return (REG_P (src) && REG_P (dst)
	  && REGNO (src) == REGNO (dst));
}
163038fd1498Szrj 
163138fd1498Szrj /* Return nonzero if an insn consists only of SETs, each of which only sets a
163238fd1498Szrj    value to itself.  */
163338fd1498Szrj 
163438fd1498Szrj int
noop_move_p(const rtx_insn * insn)163538fd1498Szrj noop_move_p (const rtx_insn *insn)
163638fd1498Szrj {
163738fd1498Szrj   rtx pat = PATTERN (insn);
163838fd1498Szrj 
163938fd1498Szrj   if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
164038fd1498Szrj     return 1;
164138fd1498Szrj 
164238fd1498Szrj   /* Insns carrying these notes are useful later on.  */
164338fd1498Szrj   if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
164438fd1498Szrj     return 0;
164538fd1498Szrj 
164638fd1498Szrj   /* Check the code to be executed for COND_EXEC.  */
164738fd1498Szrj   if (GET_CODE (pat) == COND_EXEC)
164838fd1498Szrj     pat = COND_EXEC_CODE (pat);
164938fd1498Szrj 
165038fd1498Szrj   if (GET_CODE (pat) == SET && set_noop_p (pat))
165138fd1498Szrj     return 1;
165238fd1498Szrj 
165338fd1498Szrj   if (GET_CODE (pat) == PARALLEL)
165438fd1498Szrj     {
165538fd1498Szrj       int i;
165638fd1498Szrj       /* If nothing but SETs of registers to themselves,
165738fd1498Szrj 	 this insn can also be deleted.  */
165838fd1498Szrj       for (i = 0; i < XVECLEN (pat, 0); i++)
165938fd1498Szrj 	{
166038fd1498Szrj 	  rtx tem = XVECEXP (pat, 0, i);
166138fd1498Szrj 
166238fd1498Szrj 	  if (GET_CODE (tem) == USE
166338fd1498Szrj 	      || GET_CODE (tem) == CLOBBER)
166438fd1498Szrj 	    continue;
166538fd1498Szrj 
166638fd1498Szrj 	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
166738fd1498Szrj 	    return 0;
166838fd1498Szrj 	}
166938fd1498Szrj 
167038fd1498Szrj       return 1;
167138fd1498Szrj     }
167238fd1498Szrj   return 0;
167338fd1498Szrj }
167438fd1498Szrj 
167538fd1498Szrj 
167638fd1498Szrj /* Return nonzero if register in range [REGNO, ENDREGNO)
167738fd1498Szrj    appears either explicitly or implicitly in X
167838fd1498Szrj    other than being stored into.
167938fd1498Szrj 
168038fd1498Szrj    References contained within the substructure at LOC do not count.
168138fd1498Szrj    LOC may be zero, meaning don't ignore anything.  */
168238fd1498Szrj 
bool
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
		   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return false;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
	 clobber a virtual register.  In fact, we could be more precise,
	 but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
	   || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	       && x_regno == ARG_POINTER_REGNUM)
	   || x_regno == FRAME_POINTER_REGNUM)
	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
	return true;

      /* True iff [REGNO, ENDREGNO) intersects X's register range.  */
      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
	 registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int inner_regno = subreg_regno (x);
	  unsigned int inner_endregno
	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
			     ? subreg_nregs (x) : 1);

	  return endregno > inner_regno && regno < inner_endregno;
	}
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
	  /* Note setting a SUBREG counts as referring to the REG it is in for
	     a pseudo but not for hard registers since we can
	     treat each word individually.  */
	  && ((GET_CODE (SET_DEST (x)) == SUBREG
	       && loc != &SUBREG_REG (SET_DEST (x))
	       && REG_P (SUBREG_REG (SET_DEST (x)))
	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
	       && refers_to_regno_p (regno, endregno,
				     SUBREG_REG (SET_DEST (x)), loc))
	      || (!REG_P (SET_DEST (x))
		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
	return true;

      /* A CLOBBER has no source; for a SET, continue with the source.  */
      if (code == CLOBBER || loc == &SET_SRC (x))
	return false;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
	{
	  /* Iterate rather than recurse on the last 'e' operand
	     (manual tail-call elimination).  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }
	  else
	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
	      return true;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (loc != &XVECEXP (x, i, j)
		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
	      return true;
	}
    }
  return false;
}
178438fd1498Szrj 
178538fd1498Szrj /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
178638fd1498Szrj    we check if any register number in X conflicts with the relevant register
178738fd1498Szrj    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
178838fd1498Szrj    contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */
179038fd1498Szrj 
int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case CLOBBER:
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      /* For a SUBREG of a hard register, narrow to the exact registers
	 covered; a SUBREG of a pseudo stands for the whole pseudo.  */
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
	const char *fmt;
	int i;

	/* Conservatively assume any two MEMs may conflict.  */
	if (MEM_P (in))
	  return 1;

	/* Otherwise scan IN's subexpressions looking for a MEM.  */
	fmt = GET_RTX_FORMAT (GET_CODE (in));
	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
	  if (fmt[i] == 'e')
	    {
	      if (reg_overlap_mentioned_p (x, XEXP (in, i)))
		return 1;
	    }
	  else if (fmt[i] == 'E')
	    {
	      int j;
	      for (j = XVECLEN (in, i) - 1; j >= 0; --j)
		if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
		  return 1;
	    }

	return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      /* These only ever match themselves, so a plain mention test
	 suffices.  */
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
	int i;

	/* If any register in here refers to it we return true.  */
	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
	    return 1;
	return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
187538fd1498Szrj 
187638fd1498Szrj /* Call FUN on each register or MEM that is stored into or clobbered by X.
187738fd1498Szrj    (X would be the pattern of an insn).  DATA is an arbitrary pointer,
187838fd1498Szrj    ignored by note_stores, but passed to FUN.
187938fd1498Szrj 
188038fd1498Szrj    FUN receives three arguments:
188138fd1498Szrj    1. the REG, MEM, CC0 or PC being stored in or clobbered,
188238fd1498Szrj    2. the SET or CLOBBER rtx that does the store,
188338fd1498Szrj    3. the pointer DATA provided to note_stores.
188438fd1498Szrj 
188538fd1498Szrj   If the item being stored in or clobbered is a SUBREG of a hard register,
188638fd1498Szrj   the SUBREG will be passed.  */
188738fd1498Szrj 
void
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
{
  int i;

  /* For a conditional store, report the code that would be executed.  */
  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      /* Strip wrappers that only partially modify their operand
	 (SUBREGs of pseudos, ZERO_EXTRACT, STRICT_LOW_PART) so FUN
	 sees the underlying object; a SUBREG of a hard register is
	 kept as-is, per the function comment above.  */
      while ((GET_CODE (dest) == SUBREG
	      && (!REG_P (SUBREG_REG (dest))
		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
	     || GET_CODE (dest) == ZERO_EXTRACT
	     || GET_CODE (dest) == STRICT_LOW_PART)
	dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
	 each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
	{
	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
	}
      else
	(*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    /* A PARALLEL pattern: visit each element in turn.  */
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
192338fd1498Szrj 
/* Like note_stores, but call FUN for each expression that is being
192538fd1498Szrj    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
192638fd1498Szrj    FUN for each expression, not any interior subexpressions.  FUN receives a
192738fd1498Szrj    pointer to the expression and the DATA passed to this function.
192838fd1498Szrj 
192938fd1498Szrj    Note that this is not quite the same test as that done in reg_referenced_p
193038fd1498Szrj    since that considers something as being referenced if it is being
193138fd1498Szrj    partially set, while we do not.  */
193238fd1498Szrj 
void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      /* Both the test and the guarded body are referenced.  */
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      /* Recurse into the pattern of each insn in the sequence.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      /* Only the inputs are uses; the outputs are stores.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      /* The prefetched address is a use.  */
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      /* Every operand of an UNSPEC counts as a use.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	(*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      /* Clobbering a MEM still uses the MEM's address.  */
      if (MEM_P (XEXP (body, 0)))
	(*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
	rtx dest = SET_DEST (body);

	/* For sets we replace everything in source plus registers in memory
	   expression in store and operands of a ZERO_EXTRACT.  */
	(*fun) (&SET_SRC (body), data);

	if (GET_CODE (dest) == ZERO_EXTRACT)
	  {
	    (*fun) (&XEXP (dest, 1), data);
	    (*fun) (&XEXP (dest, 2), data);
	  }

	/* Strip wrappers to see whether the real destination is a MEM,
	   whose address would then be a use.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
	  dest = XEXP (dest, 0);

	if (MEM_P (dest))
	  (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
201238fd1498Szrj 
201338fd1498Szrj /* Return nonzero if X's old contents don't survive after INSN.
201438fd1498Szrj    This will be true if X is (cc0) or if X is a register and
201538fd1498Szrj    X dies in INSN or because INSN entirely sets X.
201638fd1498Szrj 
201738fd1498Szrj    "Entirely set" means set directly and not through a SUBREG, or
201838fd1498Szrj    ZERO_EXTRACT, so no trace of the old contents remains.
201938fd1498Szrj    Likewise, REG_INC does not count.
202038fd1498Szrj 
202138fd1498Szrj    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
202238fd1498Szrj    but for this use that makes no difference, since regs don't overlap
202338fd1498Szrj    during their lifetimes.  Therefore, this function may be used
202438fd1498Szrj    at any time after deaths have been computed.
202538fd1498Szrj 
202638fd1498Szrj    If REG is a hard reg that occupies multiple machine registers, this
202738fd1498Szrj    function will only return 1 if each of those registers will be replaced
202838fd1498Szrj    by INSN.  */
202938fd1498Szrj 
203038fd1498Szrj int
dead_or_set_p(const rtx_insn * insn,const_rtx x)203138fd1498Szrj dead_or_set_p (const rtx_insn *insn, const_rtx x)
203238fd1498Szrj {
203338fd1498Szrj   unsigned int regno, end_regno;
203438fd1498Szrj   unsigned int i;
203538fd1498Szrj 
203638fd1498Szrj   /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
203738fd1498Szrj   if (GET_CODE (x) == CC0)
203838fd1498Szrj     return 1;
203938fd1498Szrj 
204038fd1498Szrj   gcc_assert (REG_P (x));
204138fd1498Szrj 
204238fd1498Szrj   regno = REGNO (x);
204338fd1498Szrj   end_regno = END_REGNO (x);
204438fd1498Szrj   for (i = regno; i < end_regno; i++)
204538fd1498Szrj     if (! dead_or_set_regno_p (insn, i))
204638fd1498Szrj       return 0;
204738fd1498Szrj 
204838fd1498Szrj   return 1;
204938fd1498Szrj }
205038fd1498Szrj 
205138fd1498Szrj /* Return TRUE iff DEST is a register or subreg of a register, is a
205238fd1498Szrj    complete rather than read-modify-write destination, and contains
205338fd1498Szrj    register TEST_REGNO.  */
205438fd1498Szrj 
205538fd1498Szrj static bool
covers_regno_no_parallel_p(const_rtx dest,unsigned int test_regno)205638fd1498Szrj covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
205738fd1498Szrj {
205838fd1498Szrj   unsigned int regno, endregno;
205938fd1498Szrj 
206038fd1498Szrj   if (GET_CODE (dest) == SUBREG && !read_modify_subreg_p (dest))
206138fd1498Szrj     dest = SUBREG_REG (dest);
206238fd1498Szrj 
206338fd1498Szrj   if (!REG_P (dest))
206438fd1498Szrj     return false;
206538fd1498Szrj 
206638fd1498Szrj   regno = REGNO (dest);
206738fd1498Szrj   endregno = END_REGNO (dest);
206838fd1498Szrj   return (test_regno >= regno && test_regno < endregno);
206938fd1498Szrj }
207038fd1498Szrj 
207138fd1498Szrj /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
207238fd1498Szrj    any member matches the covers_regno_no_parallel_p criteria.  */
207338fd1498Szrj 
207438fd1498Szrj static bool
covers_regno_p(const_rtx dest,unsigned int test_regno)207538fd1498Szrj covers_regno_p (const_rtx dest, unsigned int test_regno)
207638fd1498Szrj {
207738fd1498Szrj   if (GET_CODE (dest) == PARALLEL)
207838fd1498Szrj     {
207938fd1498Szrj       /* Some targets place small structures in registers for return
208038fd1498Szrj 	 values of functions, and those registers are wrapped in
208138fd1498Szrj 	 PARALLELs that we may see as the destination of a SET.  */
208238fd1498Szrj       int i;
208338fd1498Szrj 
208438fd1498Szrj       for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
208538fd1498Szrj 	{
208638fd1498Szrj 	  rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
208738fd1498Szrj 	  if (inner != NULL_RTX
208838fd1498Szrj 	      && covers_regno_no_parallel_p (inner, test_regno))
208938fd1498Szrj 	    return true;
209038fd1498Szrj 	}
209138fd1498Szrj 
209238fd1498Szrj       return false;
209338fd1498Szrj     }
209438fd1498Szrj   else
209538fd1498Szrj     return covers_regno_no_parallel_p (dest, test_regno);
209638fd1498Szrj }
209738fd1498Szrj 
209838fd1498Szrj /* Utility function for dead_or_set_p to check an individual register. */
209938fd1498Szrj 
210038fd1498Szrj int
dead_or_set_regno_p(const rtx_insn * insn,unsigned int test_regno)210138fd1498Szrj dead_or_set_regno_p (const rtx_insn *insn, unsigned int test_regno)
210238fd1498Szrj {
210338fd1498Szrj   const_rtx pattern;
210438fd1498Szrj 
210538fd1498Szrj   /* See if there is a death note for something that includes TEST_REGNO.  */
210638fd1498Szrj   if (find_regno_note (insn, REG_DEAD, test_regno))
210738fd1498Szrj     return 1;
210838fd1498Szrj 
210938fd1498Szrj   if (CALL_P (insn)
211038fd1498Szrj       && find_regno_fusage (insn, CLOBBER, test_regno))
211138fd1498Szrj     return 1;
211238fd1498Szrj 
211338fd1498Szrj   pattern = PATTERN (insn);
211438fd1498Szrj 
211538fd1498Szrj   /* If a COND_EXEC is not executed, the value survives.  */
211638fd1498Szrj   if (GET_CODE (pattern) == COND_EXEC)
211738fd1498Szrj     return 0;
211838fd1498Szrj 
211938fd1498Szrj   if (GET_CODE (pattern) == SET || GET_CODE (pattern) == CLOBBER)
212038fd1498Szrj     return covers_regno_p (SET_DEST (pattern), test_regno);
212138fd1498Szrj   else if (GET_CODE (pattern) == PARALLEL)
212238fd1498Szrj     {
212338fd1498Szrj       int i;
212438fd1498Szrj 
212538fd1498Szrj       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
212638fd1498Szrj 	{
212738fd1498Szrj 	  rtx body = XVECEXP (pattern, 0, i);
212838fd1498Szrj 
212938fd1498Szrj 	  if (GET_CODE (body) == COND_EXEC)
213038fd1498Szrj 	    body = COND_EXEC_CODE (body);
213138fd1498Szrj 
213238fd1498Szrj 	  if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
213338fd1498Szrj 	      && covers_regno_p (SET_DEST (body), test_regno))
213438fd1498Szrj 	    return 1;
213538fd1498Szrj 	}
213638fd1498Szrj     }
213738fd1498Szrj 
213838fd1498Szrj   return 0;
213938fd1498Szrj }
214038fd1498Szrj 
214138fd1498Szrj /* Return the reg-note of kind KIND in insn INSN, if there is one.
214238fd1498Szrj    If DATUM is nonzero, look for one whose datum is DATUM.  */
214338fd1498Szrj 
214438fd1498Szrj rtx
find_reg_note(const_rtx insn,enum reg_note kind,const_rtx datum)214538fd1498Szrj find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
214638fd1498Szrj {
214738fd1498Szrj   rtx link;
214838fd1498Szrj 
214938fd1498Szrj   gcc_checking_assert (insn);
215038fd1498Szrj 
215138fd1498Szrj   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
215238fd1498Szrj   if (! INSN_P (insn))
215338fd1498Szrj     return 0;
215438fd1498Szrj   if (datum == 0)
215538fd1498Szrj     {
215638fd1498Szrj       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
215738fd1498Szrj 	if (REG_NOTE_KIND (link) == kind)
215838fd1498Szrj 	  return link;
215938fd1498Szrj       return 0;
216038fd1498Szrj     }
216138fd1498Szrj 
216238fd1498Szrj   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
216338fd1498Szrj     if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
216438fd1498Szrj       return link;
216538fd1498Szrj   return 0;
216638fd1498Szrj }
216738fd1498Szrj 
216838fd1498Szrj /* Return the reg-note of kind KIND in insn INSN which applies to register
216938fd1498Szrj    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
217038fd1498Szrj    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
217138fd1498Szrj    it might be the case that the note overlaps REGNO.  */
217238fd1498Szrj 
217338fd1498Szrj rtx
find_regno_note(const_rtx insn,enum reg_note kind,unsigned int regno)217438fd1498Szrj find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
217538fd1498Szrj {
217638fd1498Szrj   rtx link;
217738fd1498Szrj 
217838fd1498Szrj   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
217938fd1498Szrj   if (! INSN_P (insn))
218038fd1498Szrj     return 0;
218138fd1498Szrj 
218238fd1498Szrj   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
218338fd1498Szrj     if (REG_NOTE_KIND (link) == kind
218438fd1498Szrj 	/* Verify that it is a register, so that scratch and MEM won't cause a
218538fd1498Szrj 	   problem here.  */
218638fd1498Szrj 	&& REG_P (XEXP (link, 0))
218738fd1498Szrj 	&& REGNO (XEXP (link, 0)) <= regno
218838fd1498Szrj 	&& END_REGNO (XEXP (link, 0)) > regno)
218938fd1498Szrj       return link;
219038fd1498Szrj   return 0;
219138fd1498Szrj }
219238fd1498Szrj 
219338fd1498Szrj /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
219438fd1498Szrj    has such a note.  */
219538fd1498Szrj 
219638fd1498Szrj rtx
find_reg_equal_equiv_note(const_rtx insn)219738fd1498Szrj find_reg_equal_equiv_note (const_rtx insn)
219838fd1498Szrj {
219938fd1498Szrj   rtx link;
220038fd1498Szrj 
220138fd1498Szrj   if (!INSN_P (insn))
220238fd1498Szrj     return 0;
220338fd1498Szrj 
220438fd1498Szrj   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
220538fd1498Szrj     if (REG_NOTE_KIND (link) == REG_EQUAL
220638fd1498Szrj 	|| REG_NOTE_KIND (link) == REG_EQUIV)
220738fd1498Szrj       {
220838fd1498Szrj 	/* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
220938fd1498Szrj 	   insns that have multiple sets.  Checking single_set to
221038fd1498Szrj 	   make sure of this is not the proper check, as explained
221138fd1498Szrj 	   in the comment in set_unique_reg_note.
221238fd1498Szrj 
221338fd1498Szrj 	   This should be changed into an assert.  */
221438fd1498Szrj 	if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
221538fd1498Szrj 	  return 0;
221638fd1498Szrj 	return link;
221738fd1498Szrj       }
221838fd1498Szrj   return NULL;
221938fd1498Szrj }
222038fd1498Szrj 
222138fd1498Szrj /* Check whether INSN is a single_set whose source is known to be
222238fd1498Szrj    equivalent to a constant.  Return that constant if so, otherwise
222338fd1498Szrj    return null.  */
222438fd1498Szrj 
222538fd1498Szrj rtx
find_constant_src(const rtx_insn * insn)222638fd1498Szrj find_constant_src (const rtx_insn *insn)
222738fd1498Szrj {
222838fd1498Szrj   rtx note, set, x;
222938fd1498Szrj 
223038fd1498Szrj   set = single_set (insn);
223138fd1498Szrj   if (set)
223238fd1498Szrj     {
223338fd1498Szrj       x = avoid_constant_pool_reference (SET_SRC (set));
223438fd1498Szrj       if (CONSTANT_P (x))
223538fd1498Szrj 	return x;
223638fd1498Szrj     }
223738fd1498Szrj 
223838fd1498Szrj   note = find_reg_equal_equiv_note (insn);
223938fd1498Szrj   if (note && CONSTANT_P (XEXP (note, 0)))
224038fd1498Szrj     return XEXP (note, 0);
224138fd1498Szrj 
224238fd1498Szrj   return NULL_RTX;
224338fd1498Szrj }
224438fd1498Szrj 
224538fd1498Szrj /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
224638fd1498Szrj    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
224738fd1498Szrj 
224838fd1498Szrj int
find_reg_fusage(const_rtx insn,enum rtx_code code,const_rtx datum)224938fd1498Szrj find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
225038fd1498Szrj {
225138fd1498Szrj   /* If it's not a CALL_INSN, it can't possibly have a
225238fd1498Szrj      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
225338fd1498Szrj   if (!CALL_P (insn))
225438fd1498Szrj     return 0;
225538fd1498Szrj 
225638fd1498Szrj   gcc_assert (datum);
225738fd1498Szrj 
225838fd1498Szrj   if (!REG_P (datum))
225938fd1498Szrj     {
226038fd1498Szrj       rtx link;
226138fd1498Szrj 
226238fd1498Szrj       for (link = CALL_INSN_FUNCTION_USAGE (insn);
226338fd1498Szrj 	   link;
226438fd1498Szrj 	   link = XEXP (link, 1))
226538fd1498Szrj 	if (GET_CODE (XEXP (link, 0)) == code
226638fd1498Szrj 	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
226738fd1498Szrj 	  return 1;
226838fd1498Szrj     }
226938fd1498Szrj   else
227038fd1498Szrj     {
227138fd1498Szrj       unsigned int regno = REGNO (datum);
227238fd1498Szrj 
227338fd1498Szrj       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
227438fd1498Szrj 	 to pseudo registers, so don't bother checking.  */
227538fd1498Szrj 
227638fd1498Szrj       if (regno < FIRST_PSEUDO_REGISTER)
227738fd1498Szrj 	{
227838fd1498Szrj 	  unsigned int end_regno = END_REGNO (datum);
227938fd1498Szrj 	  unsigned int i;
228038fd1498Szrj 
228138fd1498Szrj 	  for (i = regno; i < end_regno; i++)
228238fd1498Szrj 	    if (find_regno_fusage (insn, code, i))
228338fd1498Szrj 	      return 1;
228438fd1498Szrj 	}
228538fd1498Szrj     }
228638fd1498Szrj 
228738fd1498Szrj   return 0;
228838fd1498Szrj }
228938fd1498Szrj 
229038fd1498Szrj /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
229138fd1498Szrj    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
229238fd1498Szrj 
229338fd1498Szrj int
find_regno_fusage(const_rtx insn,enum rtx_code code,unsigned int regno)229438fd1498Szrj find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
229538fd1498Szrj {
229638fd1498Szrj   rtx link;
229738fd1498Szrj 
229838fd1498Szrj   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
229938fd1498Szrj      to pseudo registers, so don't bother checking.  */
230038fd1498Szrj 
230138fd1498Szrj   if (regno >= FIRST_PSEUDO_REGISTER
230238fd1498Szrj       || !CALL_P (insn) )
230338fd1498Szrj     return 0;
230438fd1498Szrj 
230538fd1498Szrj   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
230638fd1498Szrj     {
230738fd1498Szrj       rtx op, reg;
230838fd1498Szrj 
230938fd1498Szrj       if (GET_CODE (op = XEXP (link, 0)) == code
231038fd1498Szrj 	  && REG_P (reg = XEXP (op, 0))
231138fd1498Szrj 	  && REGNO (reg) <= regno
231238fd1498Szrj 	  && END_REGNO (reg) > regno)
231338fd1498Szrj 	return 1;
231438fd1498Szrj     }
231538fd1498Szrj 
231638fd1498Szrj   return 0;
231738fd1498Szrj }
231838fd1498Szrj 
231938fd1498Szrj 
/* Return true if KIND is an integer REG_NOTE, i.e. one whose datum is a
   bare int held in an INT_LIST rather than an rtx held in an EXPR_LIST
   or INSN_LIST.  */

static bool
int_reg_note_p (enum reg_note kind)
{
  /* REG_BR_PROB is currently the only such note kind.  */
  return kind == REG_BR_PROB;
}
232738fd1498Szrj 
232838fd1498Szrj /* Allocate a register note with kind KIND and datum DATUM.  LIST is
232938fd1498Szrj    stored as the pointer to the next register note.  */
233038fd1498Szrj 
233138fd1498Szrj rtx
alloc_reg_note(enum reg_note kind,rtx datum,rtx list)233238fd1498Szrj alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
233338fd1498Szrj {
233438fd1498Szrj   rtx note;
233538fd1498Szrj 
233638fd1498Szrj   gcc_checking_assert (!int_reg_note_p (kind));
233738fd1498Szrj   switch (kind)
233838fd1498Szrj     {
233938fd1498Szrj     case REG_CC_SETTER:
234038fd1498Szrj     case REG_CC_USER:
234138fd1498Szrj     case REG_LABEL_TARGET:
234238fd1498Szrj     case REG_LABEL_OPERAND:
234338fd1498Szrj     case REG_TM:
234438fd1498Szrj       /* These types of register notes use an INSN_LIST rather than an
234538fd1498Szrj 	 EXPR_LIST, so that copying is done right and dumps look
234638fd1498Szrj 	 better.  */
234738fd1498Szrj       note = alloc_INSN_LIST (datum, list);
234838fd1498Szrj       PUT_REG_NOTE_KIND (note, kind);
234938fd1498Szrj       break;
235038fd1498Szrj 
235138fd1498Szrj     default:
235238fd1498Szrj       note = alloc_EXPR_LIST (kind, datum, list);
235338fd1498Szrj       break;
235438fd1498Szrj     }
235538fd1498Szrj 
235638fd1498Szrj   return note;
235738fd1498Szrj }
235838fd1498Szrj 
235938fd1498Szrj /* Add register note with kind KIND and datum DATUM to INSN.  */
236038fd1498Szrj 
236138fd1498Szrj void
add_reg_note(rtx insn,enum reg_note kind,rtx datum)236238fd1498Szrj add_reg_note (rtx insn, enum reg_note kind, rtx datum)
236338fd1498Szrj {
236438fd1498Szrj   REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
236538fd1498Szrj }
236638fd1498Szrj 
/* Add an integer register note with kind KIND and datum DATUM to INSN.  */

void
add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum)
{
  /* Integer notes live in an INT_LIST; the note kind is stored in the
     list node's mode field, hence the cast to machine_mode.  */
  gcc_checking_assert (int_reg_note_p (kind));
  REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
				       datum, REG_NOTES (insn));
}
237638fd1498Szrj 
/* Add a REG_ARGS_SIZE note to INSN with value VALUE.  */

void
add_args_size_note (rtx_insn *insn, poly_int64 value)
{
  /* An insn must carry at most one REG_ARGS_SIZE note.  */
  gcc_checking_assert (!find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX));
  add_reg_note (insn, REG_ARGS_SIZE, gen_int_mode (value, Pmode));
}
238538fd1498Szrj 
238638fd1498Szrj /* Add a register note like NOTE to INSN.  */
238738fd1498Szrj 
238838fd1498Szrj void
add_shallow_copy_of_reg_note(rtx_insn * insn,rtx note)238938fd1498Szrj add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note)
239038fd1498Szrj {
239138fd1498Szrj   if (GET_CODE (note) == INT_LIST)
239238fd1498Szrj     add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
239338fd1498Szrj   else
239438fd1498Szrj     add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
239538fd1498Szrj }
239638fd1498Szrj 
239738fd1498Szrj /* Duplicate NOTE and return the copy.  */
239838fd1498Szrj rtx
duplicate_reg_note(rtx note)239938fd1498Szrj duplicate_reg_note (rtx note)
240038fd1498Szrj {
240138fd1498Szrj   reg_note kind = REG_NOTE_KIND (note);
240238fd1498Szrj 
240338fd1498Szrj   if (GET_CODE (note) == INT_LIST)
240438fd1498Szrj     return gen_rtx_INT_LIST ((machine_mode) kind, XINT (note, 0), NULL_RTX);
240538fd1498Szrj   else if (GET_CODE (note) == EXPR_LIST)
240638fd1498Szrj     return alloc_reg_note (kind, copy_insn_1 (XEXP (note, 0)), NULL_RTX);
240738fd1498Szrj   else
240838fd1498Szrj     return alloc_reg_note (kind, XEXP (note, 0), NULL_RTX);
240938fd1498Szrj }
241038fd1498Szrj 
241138fd1498Szrj /* Remove register note NOTE from the REG_NOTES of INSN.  */
241238fd1498Szrj 
241338fd1498Szrj void
remove_note(rtx_insn * insn,const_rtx note)241438fd1498Szrj remove_note (rtx_insn *insn, const_rtx note)
241538fd1498Szrj {
241638fd1498Szrj   rtx link;
241738fd1498Szrj 
241838fd1498Szrj   if (note == NULL_RTX)
241938fd1498Szrj     return;
242038fd1498Szrj 
242138fd1498Szrj   if (REG_NOTES (insn) == note)
242238fd1498Szrj     REG_NOTES (insn) = XEXP (note, 1);
242338fd1498Szrj   else
242438fd1498Szrj     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
242538fd1498Szrj       if (XEXP (link, 1) == note)
242638fd1498Szrj 	{
242738fd1498Szrj 	  XEXP (link, 1) = XEXP (note, 1);
242838fd1498Szrj 	  break;
242938fd1498Szrj 	}
243038fd1498Szrj 
243138fd1498Szrj   switch (REG_NOTE_KIND (note))
243238fd1498Szrj     {
243338fd1498Szrj     case REG_EQUAL:
243438fd1498Szrj     case REG_EQUIV:
243538fd1498Szrj       df_notes_rescan (insn);
243638fd1498Szrj       break;
243738fd1498Szrj     default:
243838fd1498Szrj       break;
243938fd1498Szrj     }
244038fd1498Szrj }
244138fd1498Szrj 
244238fd1498Szrj /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.
244338fd1498Szrj    Return true if any note has been removed.  */
244438fd1498Szrj 
244538fd1498Szrj bool
remove_reg_equal_equiv_notes(rtx_insn * insn)244638fd1498Szrj remove_reg_equal_equiv_notes (rtx_insn *insn)
244738fd1498Szrj {
244838fd1498Szrj   rtx *loc;
244938fd1498Szrj   bool ret = false;
245038fd1498Szrj 
245138fd1498Szrj   loc = &REG_NOTES (insn);
245238fd1498Szrj   while (*loc)
245338fd1498Szrj     {
245438fd1498Szrj       enum reg_note kind = REG_NOTE_KIND (*loc);
245538fd1498Szrj       if (kind == REG_EQUAL || kind == REG_EQUIV)
245638fd1498Szrj 	{
245738fd1498Szrj 	  *loc = XEXP (*loc, 1);
245838fd1498Szrj 	  ret = true;
245938fd1498Szrj 	}
246038fd1498Szrj       else
246138fd1498Szrj 	loc = &XEXP (*loc, 1);
246238fd1498Szrj     }
246338fd1498Szrj   return ret;
246438fd1498Szrj }
246538fd1498Szrj 
/* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.  */

void
remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
{
  df_ref eq_use;

  /* Without dataflow info there is no eq-use chain to walk.  */
  if (!df)
    return;

  /* This loop is a little tricky.  We cannot just go down the chain because
     it is being modified by some actions in the loop.  So we just iterate
     over the head.  We plan to drain the list anyway.  */
  while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
    {
      rtx_insn *insn = DF_REF_INSN (eq_use);
      rtx note = find_reg_equal_equiv_note (insn);

      /* This assert is generally triggered when someone deletes a REG_EQUAL
	 or REG_EQUIV note by hacking the list manually rather than calling
	 remove_note.  */
      gcc_assert (note);

      /* remove_note rescans df notes, which pops EQ_USE off the chain
	 and so guarantees the while loop makes progress.  */
      remove_note (insn, note);
    }
}
249238fd1498Szrj 
249338fd1498Szrj /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
249438fd1498Szrj    return 1 if it is found.  A simple equality test is used to determine if
249538fd1498Szrj    NODE matches.  */
249638fd1498Szrj 
249738fd1498Szrj bool
in_insn_list_p(const rtx_insn_list * listp,const rtx_insn * node)249838fd1498Szrj in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node)
249938fd1498Szrj {
250038fd1498Szrj   const_rtx x;
250138fd1498Szrj 
250238fd1498Szrj   for (x = listp; x; x = XEXP (x, 1))
250338fd1498Szrj     if (node == XEXP (x, 0))
250438fd1498Szrj       return true;
250538fd1498Szrj 
250638fd1498Szrj   return false;
250738fd1498Szrj }
250838fd1498Szrj 
250938fd1498Szrj /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
251038fd1498Szrj    remove that entry from the list if it is found.
251138fd1498Szrj 
251238fd1498Szrj    A simple equality test is used to determine if NODE matches.  */
251338fd1498Szrj 
251438fd1498Szrj void
remove_node_from_expr_list(const_rtx node,rtx_expr_list ** listp)251538fd1498Szrj remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp)
251638fd1498Szrj {
251738fd1498Szrj   rtx_expr_list *temp = *listp;
251838fd1498Szrj   rtx_expr_list *prev = NULL;
251938fd1498Szrj 
252038fd1498Szrj   while (temp)
252138fd1498Szrj     {
252238fd1498Szrj       if (node == temp->element ())
252338fd1498Szrj 	{
252438fd1498Szrj 	  /* Splice the node out of the list.  */
252538fd1498Szrj 	  if (prev)
252638fd1498Szrj 	    XEXP (prev, 1) = temp->next ();
252738fd1498Szrj 	  else
252838fd1498Szrj 	    *listp = temp->next ();
252938fd1498Szrj 
253038fd1498Szrj 	  return;
253138fd1498Szrj 	}
253238fd1498Szrj 
253338fd1498Szrj       prev = temp;
253438fd1498Szrj       temp = temp->next ();
253538fd1498Szrj     }
253638fd1498Szrj }
253738fd1498Szrj 
253838fd1498Szrj /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
253938fd1498Szrj    remove that entry from the list if it is found.
254038fd1498Szrj 
254138fd1498Szrj    A simple equality test is used to determine if NODE matches.  */
254238fd1498Szrj 
254338fd1498Szrj void
remove_node_from_insn_list(const rtx_insn * node,rtx_insn_list ** listp)254438fd1498Szrj remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
254538fd1498Szrj {
254638fd1498Szrj   rtx_insn_list *temp = *listp;
254738fd1498Szrj   rtx_insn_list *prev = NULL;
254838fd1498Szrj 
254938fd1498Szrj   while (temp)
255038fd1498Szrj     {
255138fd1498Szrj       if (node == temp->insn ())
255238fd1498Szrj 	{
255338fd1498Szrj 	  /* Splice the node out of the list.  */
255438fd1498Szrj 	  if (prev)
255538fd1498Szrj 	    XEXP (prev, 1) = temp->next ();
255638fd1498Szrj 	  else
255738fd1498Szrj 	    *listp = temp->next ();
255838fd1498Szrj 
255938fd1498Szrj 	  return;
256038fd1498Szrj 	}
256138fd1498Szrj 
256238fd1498Szrj       prev = temp;
256338fd1498Szrj       temp = temp->next ();
256438fd1498Szrj     }
256538fd1498Szrj }
256638fd1498Szrj 
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state instructions, and thus no
   instructions or register uses should be moved or combined across them.
   This includes only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  switch (code)
    {
      /* Codes that can never be or contain a volatile insn.  Note that
	 MEM and CLOBBER are included: a volatile memory reference is not
	 a volatile *insn* (volatile_refs_p handles that distinction).  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;
      /* Non-volatile asms deliberately fall through to the operand scan.  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    if (volatile_insn_p (XEXP (x, i)))
	      return 1;
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (volatile_insn_p (XVECEXP (x, i, j)))
		return 1;
	  }
      }
  }
  return 0;
}
262938fd1498Szrj 
/* Nonzero if X contains any volatile memory references
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  switch (code)
    {
      /* Codes that can never be or contain a volatile reference.  Unlike
	 volatile_insn_p, MEM is NOT listed here: a MEM is itself checked
	 for volatility below.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;
      /* Non-volatile MEMs/asms deliberately fall through to the
	 operand scan (e.g. a volatile MEM inside an address).  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    if (volatile_refs_p (XEXP (x, i)))
	      return 1;
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (volatile_refs_p (XVECEXP (x, i, j)))
		return 1;
	  }
      }
  }
  return 0;
}
268938fd1498Szrj 
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  switch (code)
    {
      /* Codes with no side effects and no operands that could have any.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
	 when some combination can't be done.  If we see one, don't think
	 that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

      /* Auto-modify addressing, calls and volatile unspecs always count
	 as side effects.  */
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;
      /* Non-volatile MEMs/asms deliberately fall through to the
	 operand scan below.  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    if (side_effects_p (XEXP (x, i)))
	      return 1;
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (side_effects_p (XVECEXP (x, i, j)))
		return 1;
	  }
      }
  }
  return 0;
}
276238fd1498Szrj 
/* Return nonzero if evaluating rtx X might cause a trap.
   FLAGS controls how to consider MEMs.  A nonzero means the context
   of the access may have changed from the original, such that the
   address may have become invalid.  */

int
may_trap_p_1 (const_rtx x, unsigned flags)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  /* We make no distinction currently, but this function is part of
     the internal target-hooks ABI so we keep the parameter as
     "unsigned flags".  */
  bool code_changed = flags != 0;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case UNSPEC:
      /* Only the target knows whether a given UNSPEC can trap.  */
      return targetm.unspec_may_trap_p (x, flags);

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      /* Recognize specific pattern of stack checking probes.  */
      if (flag_stack_check
	  && MEM_VOLATILE_P (x)
	  && XEXP (x, 0) == stack_pointer_rtx)
	return 1;
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
	     reference; moving it out of context such as when moving code
	     when optimizing, might cause its address to become invalid.  */
	  code_changed
	  || !MEM_NOTRAP_P (x))
	{
	  poly_int64 size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : -1;
	  return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
					GET_MODE (x), code_changed);
	}

      return 0;

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (x))
	return 1;
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
	return flag_trapping_math;
      /* A nonconstant divisor, or an explicit zero, may trap; any other
	 constant divisor is known nonzero and therefore safe.  */
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
	return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
	 certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
	break;
      /* ??? There is no machine independent way to check for tests that trap
	 when COMPARE is used, though many targets do make this distinction.
	 For instance, sparc uses CCFPE for compares which generate exceptions
	 and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (x))
	return 1;
      /* But often the compare has some CC mode, so check operand
	 modes as well.  */
      if (HONOR_NANS (XEXP (x, 0))
	  || HONOR_NANS (XEXP (x, 1)))
	return 1;
      break;

    case EQ:
    case NE:
      /* Equality comparisons trap only on signaling NaNs.  */
      if (HONOR_SNANS (x))
	return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (XEXP (x, 0))
	  || HONOR_SNANS (XEXP (x, 1)))
	return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
	return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
	return 1;
    }

  /* No trap at this level; recursively scan the operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (may_trap_p_1 (XEXP (x, i), flags))
	    return 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (may_trap_p_1 (XVECEXP (x, i, j), flags))
	      return 1;
	}
    }
  return 0;
}
291338fd1498Szrj 
/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (const_rtx x)
{
  /* FLAGS == 0: each MEM is assumed to be in its original context,
     so MEM_NOTRAP_P can be trusted.  */
  return may_trap_p_1 (x, 0);
}
292138fd1498Szrj 
292238fd1498Szrj /* Same as above, but additionally return nonzero if evaluating rtx X might
292338fd1498Szrj    cause a fault.  We define a fault for the purpose of this function as a
292438fd1498Szrj    erroneous execution condition that cannot be encountered during the normal
292538fd1498Szrj    execution of a valid program; the typical example is an unaligned memory
292638fd1498Szrj    access on a strict alignment machine.  The compiler guarantees that it
292738fd1498Szrj    doesn't generate code that will fault from a valid program, but this
292838fd1498Szrj    guarantee doesn't mean anything for individual instructions.  Consider
292938fd1498Szrj    the following example:
293038fd1498Szrj 
293138fd1498Szrj       struct S { int d; union { char *cp; int *ip; }; };
293238fd1498Szrj 
293338fd1498Szrj       int foo(struct S *s)
293438fd1498Szrj       {
293538fd1498Szrj 	if (s->d == 1)
293638fd1498Szrj 	  return *s->ip;
293738fd1498Szrj 	else
293838fd1498Szrj 	  return *s->cp;
293938fd1498Szrj       }
294038fd1498Szrj 
294138fd1498Szrj    on a strict alignment machine.  In a valid program, foo will never be
294238fd1498Szrj    invoked on a structure for which d is equal to 1 and the underlying
294338fd1498Szrj    unique field of the union not aligned on a 4-byte boundary, but the
294438fd1498Szrj    expression *s->ip might cause a fault if considered individually.
294538fd1498Szrj 
294638fd1498Szrj    At the RTL level, potentially problematic expressions will almost always
294738fd1498Szrj    verify may_trap_p; for example, the above dereference can be emitted as
294838fd1498Szrj    (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
294938fd1498Szrj    However, suppose that foo is inlined in a caller that causes s->cp to
295038fd1498Szrj    point to a local character variable and guarantees that s->d is not set
295138fd1498Szrj    to 1; foo may have been effectively translated into pseudo-RTL as:
295238fd1498Szrj 
295338fd1498Szrj       if ((reg:SI) == 1)
295438fd1498Szrj 	(set (reg:SI) (mem:SI (%fp - 7)))
295538fd1498Szrj       else
295638fd1498Szrj 	(set (reg:QI) (mem:QI (%fp - 7)))
295738fd1498Szrj 
295838fd1498Szrj    Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
295938fd1498Szrj    memory reference to a stack slot, but it will certainly cause a fault
296038fd1498Szrj    on a strict alignment machine.  */
296138fd1498Szrj 
int
may_trap_or_fault_p (const_rtx x)
{
  /* FLAGS == 1: also count erroneous-execution faults (e.g. unaligned
     accesses on strict-alignment targets) as described above.  */
  return may_trap_p_1 (x, 1);
}
296738fd1498Szrj 
296838fd1498Szrj /* Return nonzero if X contains a comparison that is not either EQ or NE,
296938fd1498Szrj    i.e., an inequality.  */
297038fd1498Szrj 
297138fd1498Szrj int
inequality_comparisons_p(const_rtx x)297238fd1498Szrj inequality_comparisons_p (const_rtx x)
297338fd1498Szrj {
297438fd1498Szrj   const char *fmt;
297538fd1498Szrj   int len, i;
297638fd1498Szrj   const enum rtx_code code = GET_CODE (x);
297738fd1498Szrj 
297838fd1498Szrj   switch (code)
297938fd1498Szrj     {
298038fd1498Szrj     case REG:
298138fd1498Szrj     case SCRATCH:
298238fd1498Szrj     case PC:
298338fd1498Szrj     case CC0:
298438fd1498Szrj     CASE_CONST_ANY:
298538fd1498Szrj     case CONST:
298638fd1498Szrj     case LABEL_REF:
298738fd1498Szrj     case SYMBOL_REF:
298838fd1498Szrj       return 0;
298938fd1498Szrj 
299038fd1498Szrj     case LT:
299138fd1498Szrj     case LTU:
299238fd1498Szrj     case GT:
299338fd1498Szrj     case GTU:
299438fd1498Szrj     case LE:
299538fd1498Szrj     case LEU:
299638fd1498Szrj     case GE:
299738fd1498Szrj     case GEU:
299838fd1498Szrj       return 1;
299938fd1498Szrj 
300038fd1498Szrj     default:
300138fd1498Szrj       break;
300238fd1498Szrj     }
300338fd1498Szrj 
300438fd1498Szrj   len = GET_RTX_LENGTH (code);
300538fd1498Szrj   fmt = GET_RTX_FORMAT (code);
300638fd1498Szrj 
300738fd1498Szrj   for (i = 0; i < len; i++)
300838fd1498Szrj     {
300938fd1498Szrj       if (fmt[i] == 'e')
301038fd1498Szrj 	{
301138fd1498Szrj 	  if (inequality_comparisons_p (XEXP (x, i)))
301238fd1498Szrj 	    return 1;
301338fd1498Szrj 	}
301438fd1498Szrj       else if (fmt[i] == 'E')
301538fd1498Szrj 	{
301638fd1498Szrj 	  int j;
301738fd1498Szrj 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
301838fd1498Szrj 	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
301938fd1498Szrj 	      return 1;
302038fd1498Szrj 	}
302138fd1498Szrj     }
302238fd1498Szrj 
302338fd1498Szrj   return 0;
302438fd1498Szrj }
302538fd1498Szrj 
302638fd1498Szrj /* Replace any occurrence of FROM in X with TO.  The function does
302738fd1498Szrj    not enter into CONST_DOUBLE for the replace.
302838fd1498Szrj 
302938fd1498Szrj    Note that copying is not done so X must not be shared unless all copies
303038fd1498Szrj    are to be modified.
303138fd1498Szrj 
303238fd1498Szrj    ALL_REGS is true if we want to replace all REGs equal to FROM, not just
303338fd1498Szrj    those pointer-equal ones.  */
303438fd1498Szrj 
rtx
replace_rtx (rtx x, rtx from, rtx to, bool all_regs)
{
  int i, j;
  const char *fmt;

  /* Pointer equality: replace the whole of X.  */
  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  if (all_regs
      && REG_P (x)
      && REG_P (from)
      && REGNO (x) == REGNO (from))
    {
      /* ALL_REGS mode: any register with FROM's number matches, not just
	 the pointer-identical rtx; the modes must agree for TO to be a
	 valid substitute.  */
      gcc_assert (GET_MODE (x) == GET_MODE (from));
      return to;
    }
  else if (GET_CODE (x) == SUBREG)
    {
      rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to, all_regs);

      if (CONST_INT_P (new_rtx))
	{
	  /* (subreg (const_int ...)) is not valid RTL; fold the subreg
	     of the constant instead of installing it.  */
	  x = simplify_subreg (GET_MODE (x), new_rtx,
			       GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
	  gcc_assert (x);
	}
      else
	SUBREG_REG (x) = new_rtx;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new_rtx = replace_rtx (XEXP (x, 0), from, to, all_regs);

      if (CONST_INT_P (new_rtx))
	{
	  /* Likewise (zero_extend (const_int ...)) must be folded.  */
	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
					new_rtx, GET_MODE (XEXP (x, 0)));
	  gcc_assert (x);
	}
      else
	XEXP (x, 0) = new_rtx;

      return x;
    }

  /* Recurse over the operands of X, rewriting them in place.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to, all_regs);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j),
					   from, to, all_regs);
    }

  return x;
}
310138fd1498Szrj 
310238fd1498Szrj /* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL.  Also track
310338fd1498Szrj    the change in LABEL_NUSES if UPDATE_LABEL_NUSES.  */
310438fd1498Szrj 
void
replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
{
  /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long.  */
  rtx x = *loc;
  if (JUMP_TABLE_DATA_P (x))
    {
      /* The label vector is operand 0 of an ADDR_VEC and operand 1 of
	 an ADDR_DIFF_VEC.  */
      x = PATTERN (x);
      rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
      int len = GET_NUM_ELEM (vec);
      for (int i = 0; i < len; ++i)
	{
	  rtx ref = RTVEC_ELT (vec, i);
	  if (XEXP (ref, 0) == old_label)
	    {
	      XEXP (ref, 0) = new_label;
	      if (update_label_nuses)
		{
		  ++LABEL_NUSES (new_label);
		  --LABEL_NUSES (old_label);
		}
	    }
	}
      return;
    }

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by the iterator because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
    JUMP_LABEL (x) = new_label;

  /* Walk every subrtx location so replacements can be made in place.  */
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
	{
	  /* Labels may also be referenced indirectly through the
	     constant pool; rebuild such constants with the new label.  */
	  if (GET_CODE (x) == SYMBOL_REF
	      && CONSTANT_POOL_ADDRESS_P (x))
	    {
	      rtx c = get_pool_constant (x);
	      if (rtx_referenced_p (old_label, c))
		{
		  /* Create a copy of constant C; replace the label inside
		     but do not update LABEL_NUSES because uses in constant pool
		     are not counted.  */
		  rtx new_c = copy_rtx (c);
		  replace_label (&new_c, old_label, new_label, false);

		  /* Add the new constant NEW_C to constant pool and replace
		     the old reference to constant by new reference.  */
		  rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
		  *loc = replace_rtx (x, x, XEXP (new_mem, 0));
		}
	    }

	  /* Direct references: LABEL_REFs in patterns and INSN_LISTs
	     (e.g. in notes) whose first operand is the old label.  */
	  if ((GET_CODE (x) == LABEL_REF
	       || GET_CODE (x) == INSN_LIST)
	      && XEXP (x, 0) == old_label)
	    {
	      XEXP (x, 0) = new_label;
	      if (update_label_nuses)
		{
		  ++LABEL_NUSES (new_label);
		  --LABEL_NUSES (old_label);
		}
	    }
	}
    }
}
317638fd1498Szrj 
void
replace_label_in_insn (rtx_insn *insn, rtx_insn *old_label,
		       rtx_insn *new_label, bool update_label_nuses)
{
  /* replace_label takes an rtx *, so go through a temporary; the insn
     itself is never replaced, only labels referenced within it.  */
  rtx insn_as_rtx = insn;
  replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
  gcc_checking_assert (insn_as_rtx == insn);
}
318538fd1498Szrj 
318638fd1498Szrj /* Return true if X is referenced in BODY.  */
318738fd1498Szrj 
318838fd1498Szrj bool
rtx_referenced_p(const_rtx x,const_rtx body)318938fd1498Szrj rtx_referenced_p (const_rtx x, const_rtx body)
319038fd1498Szrj {
319138fd1498Szrj   subrtx_iterator::array_type array;
319238fd1498Szrj   FOR_EACH_SUBRTX (iter, array, body, ALL)
319338fd1498Szrj     if (const_rtx y = *iter)
319438fd1498Szrj       {
319538fd1498Szrj 	/* Check if a label_ref Y refers to label X.  */
319638fd1498Szrj 	if (GET_CODE (y) == LABEL_REF
319738fd1498Szrj 	    && LABEL_P (x)
319838fd1498Szrj 	    && label_ref_label (y) == x)
319938fd1498Szrj 	  return true;
320038fd1498Szrj 
320138fd1498Szrj 	if (rtx_equal_p (x, y))
320238fd1498Szrj 	  return true;
320338fd1498Szrj 
320438fd1498Szrj 	/* If Y is a reference to pool constant traverse the constant.  */
320538fd1498Szrj 	if (GET_CODE (y) == SYMBOL_REF
320638fd1498Szrj 	    && CONSTANT_POOL_ADDRESS_P (y))
320738fd1498Szrj 	  iter.substitute (get_pool_constant (y));
320838fd1498Szrj       }
320938fd1498Szrj   return false;
321038fd1498Szrj }
321138fd1498Szrj 
321238fd1498Szrj /* If INSN is a tablejump return true and store the label (before jump table) to
321338fd1498Szrj    *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */
321438fd1498Szrj 
321538fd1498Szrj bool
tablejump_p(const rtx_insn * insn,rtx_insn ** labelp,rtx_jump_table_data ** tablep)321638fd1498Szrj tablejump_p (const rtx_insn *insn, rtx_insn **labelp,
321738fd1498Szrj 	     rtx_jump_table_data **tablep)
321838fd1498Szrj {
321938fd1498Szrj   if (!JUMP_P (insn))
322038fd1498Szrj     return false;
322138fd1498Szrj 
322238fd1498Szrj   rtx target = JUMP_LABEL (insn);
322338fd1498Szrj   if (target == NULL_RTX || ANY_RETURN_P (target))
322438fd1498Szrj     return false;
322538fd1498Szrj 
322638fd1498Szrj   rtx_insn *label = as_a<rtx_insn *> (target);
322738fd1498Szrj   rtx_insn *table = next_insn (label);
322838fd1498Szrj   if (table == NULL_RTX || !JUMP_TABLE_DATA_P (table))
322938fd1498Szrj     return false;
323038fd1498Szrj 
323138fd1498Szrj   if (labelp)
323238fd1498Szrj     *labelp = label;
323338fd1498Szrj   if (tablep)
323438fd1498Szrj     *tablep = as_a <rtx_jump_table_data *> (table);
323538fd1498Szrj   return true;
323638fd1498Szrj }
323738fd1498Szrj 
323838fd1498Szrj /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
323938fd1498Szrj    constant that is not in the constant pool and not in the condition
324038fd1498Szrj    of an IF_THEN_ELSE.  */
324138fd1498Szrj 
324238fd1498Szrj static int
computed_jump_p_1(const_rtx x)324338fd1498Szrj computed_jump_p_1 (const_rtx x)
324438fd1498Szrj {
324538fd1498Szrj   const enum rtx_code code = GET_CODE (x);
324638fd1498Szrj   int i, j;
324738fd1498Szrj   const char *fmt;
324838fd1498Szrj 
324938fd1498Szrj   switch (code)
325038fd1498Szrj     {
325138fd1498Szrj     case LABEL_REF:
325238fd1498Szrj     case PC:
325338fd1498Szrj       return 0;
325438fd1498Szrj 
325538fd1498Szrj     case CONST:
325638fd1498Szrj     CASE_CONST_ANY:
325738fd1498Szrj     case SYMBOL_REF:
325838fd1498Szrj     case REG:
325938fd1498Szrj       return 1;
326038fd1498Szrj 
326138fd1498Szrj     case MEM:
326238fd1498Szrj       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
326338fd1498Szrj 		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
326438fd1498Szrj 
326538fd1498Szrj     case IF_THEN_ELSE:
326638fd1498Szrj       return (computed_jump_p_1 (XEXP (x, 1))
326738fd1498Szrj 	      || computed_jump_p_1 (XEXP (x, 2)));
326838fd1498Szrj 
326938fd1498Szrj     default:
327038fd1498Szrj       break;
327138fd1498Szrj     }
327238fd1498Szrj 
327338fd1498Szrj   fmt = GET_RTX_FORMAT (code);
327438fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
327538fd1498Szrj     {
327638fd1498Szrj       if (fmt[i] == 'e'
327738fd1498Szrj 	  && computed_jump_p_1 (XEXP (x, i)))
327838fd1498Szrj 	return 1;
327938fd1498Szrj 
328038fd1498Szrj       else if (fmt[i] == 'E')
328138fd1498Szrj 	for (j = 0; j < XVECLEN (x, i); j++)
328238fd1498Szrj 	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
328338fd1498Szrj 	    return 1;
328438fd1498Szrj     }
328538fd1498Szrj 
328638fd1498Szrj   return 0;
328738fd1498Szrj }
328838fd1498Szrj 
328938fd1498Szrj /* Return nonzero if INSN is an indirect jump (aka computed jump).
329038fd1498Szrj 
329138fd1498Szrj    Tablejumps and casesi insns are not considered indirect jumps;
329238fd1498Szrj    we can recognize them by a (use (label_ref)).  */
329338fd1498Szrj 
int
computed_jump_p (const rtx_insn *insn)
{
  int i;
  if (JUMP_P (insn))
    {
      rtx pat = PATTERN (insn);

      /* If we have a JUMP_LABEL set, we're not a computed jump.  */
      if (JUMP_LABEL (insn) != NULL)
	return 0;

      if (GET_CODE (pat) == PARALLEL)
	{
	  int len = XVECLEN (pat, 0);
	  int has_use_labelref = 0;

	  /* A (use (label_ref ...)) inside the PARALLEL marks a
	     tablejump or casesi insn, which is not a computed jump.  */
	  for (i = len - 1; i >= 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
		    == LABEL_REF))
	      {
	        has_use_labelref = 1;
	        break;
	      }

	  /* Otherwise look for a set of the PC from something that
	     computed_jump_p_1 classifies as a non-constant target.  */
	  if (! has_use_labelref)
	    for (i = len - 1; i >= 0; i--)
	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
		return 1;
	}
      else if (GET_CODE (pat) == SET
	       && SET_DEST (pat) == pc_rtx
	       && computed_jump_p_1 (SET_SRC (pat)))
	return 1;
    }
  return 0;
}
333438fd1498Szrj 
333538fd1498Szrj 
333638fd1498Szrj 
333738fd1498Szrj /* MEM has a PRE/POST-INC/DEC/MODIFY address X.  Extract the operands of
333838fd1498Szrj    the equivalent add insn and pass the result to FN, using DATA as the
333938fd1498Szrj    final argument.  */
334038fd1498Szrj 
334138fd1498Szrj static int
for_each_inc_dec_find_inc_dec(rtx mem,for_each_inc_dec_fn fn,void * data)334238fd1498Szrj for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
334338fd1498Szrj {
334438fd1498Szrj   rtx x = XEXP (mem, 0);
334538fd1498Szrj   switch (GET_CODE (x))
334638fd1498Szrj     {
334738fd1498Szrj     case PRE_INC:
334838fd1498Szrj     case POST_INC:
334938fd1498Szrj       {
335038fd1498Szrj 	poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
335138fd1498Szrj 	rtx r1 = XEXP (x, 0);
335238fd1498Szrj 	rtx c = gen_int_mode (size, GET_MODE (r1));
335338fd1498Szrj 	return fn (mem, x, r1, r1, c, data);
335438fd1498Szrj       }
335538fd1498Szrj 
335638fd1498Szrj     case PRE_DEC:
335738fd1498Szrj     case POST_DEC:
335838fd1498Szrj       {
335938fd1498Szrj 	poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
336038fd1498Szrj 	rtx r1 = XEXP (x, 0);
336138fd1498Szrj 	rtx c = gen_int_mode (-size, GET_MODE (r1));
336238fd1498Szrj 	return fn (mem, x, r1, r1, c, data);
336338fd1498Szrj       }
336438fd1498Szrj 
336538fd1498Szrj     case PRE_MODIFY:
336638fd1498Szrj     case POST_MODIFY:
336738fd1498Szrj       {
336838fd1498Szrj 	rtx r1 = XEXP (x, 0);
336938fd1498Szrj 	rtx add = XEXP (x, 1);
337038fd1498Szrj 	return fn (mem, x, r1, add, NULL, data);
337138fd1498Szrj       }
337238fd1498Szrj 
337338fd1498Szrj     default:
337438fd1498Szrj       gcc_unreachable ();
337538fd1498Szrj     }
337638fd1498Szrj }
337738fd1498Szrj 
337838fd1498Szrj /* Traverse *LOC looking for MEMs that have autoinc addresses.
337938fd1498Szrj    For each such autoinc operation found, call FN, passing it
338038fd1498Szrj    the innermost enclosing MEM, the operation itself, the RTX modified
338138fd1498Szrj    by the operation, two RTXs (the second may be NULL) that, once
338238fd1498Szrj    added, represent the value to be held by the modified RTX
338338fd1498Szrj    afterwards, and DATA.  FN is to return 0 to continue the
338438fd1498Szrj    traversal or any other value to have it returned to the caller of
338538fd1498Szrj    for_each_inc_dec.  */
338638fd1498Szrj 
338738fd1498Szrj int
for_each_inc_dec(rtx x,for_each_inc_dec_fn fn,void * data)338838fd1498Szrj for_each_inc_dec (rtx x,
338938fd1498Szrj 		  for_each_inc_dec_fn fn,
339038fd1498Szrj 		  void *data)
339138fd1498Szrj {
339238fd1498Szrj   subrtx_var_iterator::array_type array;
339338fd1498Szrj   FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
339438fd1498Szrj     {
339538fd1498Szrj       rtx mem = *iter;
339638fd1498Szrj       if (mem
339738fd1498Szrj 	  && MEM_P (mem)
339838fd1498Szrj 	  && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
339938fd1498Szrj 	{
340038fd1498Szrj 	  int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
340138fd1498Szrj 	  if (res != 0)
340238fd1498Szrj 	    return res;
340338fd1498Szrj 	  iter.skip_subrtxes ();
340438fd1498Szrj 	}
340538fd1498Szrj     }
340638fd1498Szrj   return 0;
340738fd1498Szrj }
340838fd1498Szrj 
340938fd1498Szrj 
341038fd1498Szrj /* Searches X for any reference to REGNO, returning the rtx of the
341138fd1498Szrj    reference found if any.  Otherwise, returns NULL_RTX.  */
341238fd1498Szrj 
341338fd1498Szrj rtx
regno_use_in(unsigned int regno,rtx x)341438fd1498Szrj regno_use_in (unsigned int regno, rtx x)
341538fd1498Szrj {
341638fd1498Szrj   const char *fmt;
341738fd1498Szrj   int i, j;
341838fd1498Szrj   rtx tem;
341938fd1498Szrj 
342038fd1498Szrj   if (REG_P (x) && REGNO (x) == regno)
342138fd1498Szrj     return x;
342238fd1498Szrj 
342338fd1498Szrj   fmt = GET_RTX_FORMAT (GET_CODE (x));
342438fd1498Szrj   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
342538fd1498Szrj     {
342638fd1498Szrj       if (fmt[i] == 'e')
342738fd1498Szrj 	{
342838fd1498Szrj 	  if ((tem = regno_use_in (regno, XEXP (x, i))))
342938fd1498Szrj 	    return tem;
343038fd1498Szrj 	}
343138fd1498Szrj       else if (fmt[i] == 'E')
343238fd1498Szrj 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
343338fd1498Szrj 	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
343438fd1498Szrj 	    return tem;
343538fd1498Szrj     }
343638fd1498Szrj 
343738fd1498Szrj   return NULL_RTX;
343838fd1498Szrj }
343938fd1498Szrj 
344038fd1498Szrj /* Return a value indicating whether OP, an operand of a commutative
344138fd1498Szrj    operation, is preferred as the first or second operand.  The more
344238fd1498Szrj    positive the value, the stronger the preference for being the first
344338fd1498Szrj    operand.  */
344438fd1498Szrj 
344538fd1498Szrj int
commutative_operand_precedence(rtx op)344638fd1498Szrj commutative_operand_precedence (rtx op)
344738fd1498Szrj {
344838fd1498Szrj   enum rtx_code code = GET_CODE (op);
344938fd1498Szrj 
345038fd1498Szrj   /* Constants always become the second operand.  Prefer "nice" constants.  */
345138fd1498Szrj   if (code == CONST_INT)
345238fd1498Szrj     return -10;
345338fd1498Szrj   if (code == CONST_WIDE_INT)
345438fd1498Szrj     return -9;
345538fd1498Szrj   if (code == CONST_POLY_INT)
345638fd1498Szrj     return -8;
345738fd1498Szrj   if (code == CONST_DOUBLE)
345838fd1498Szrj     return -8;
345938fd1498Szrj   if (code == CONST_FIXED)
346038fd1498Szrj     return -8;
346138fd1498Szrj   op = avoid_constant_pool_reference (op);
346238fd1498Szrj   code = GET_CODE (op);
346338fd1498Szrj 
346438fd1498Szrj   switch (GET_RTX_CLASS (code))
346538fd1498Szrj     {
346638fd1498Szrj     case RTX_CONST_OBJ:
346738fd1498Szrj       if (code == CONST_INT)
346838fd1498Szrj 	return -7;
346938fd1498Szrj       if (code == CONST_WIDE_INT)
347038fd1498Szrj 	return -6;
347138fd1498Szrj       if (code == CONST_POLY_INT)
347238fd1498Szrj 	return -5;
347338fd1498Szrj       if (code == CONST_DOUBLE)
347438fd1498Szrj 	return -5;
347538fd1498Szrj       if (code == CONST_FIXED)
347638fd1498Szrj 	return -5;
347738fd1498Szrj       return -4;
347838fd1498Szrj 
347938fd1498Szrj     case RTX_EXTRA:
348038fd1498Szrj       /* SUBREGs of objects should come second.  */
348138fd1498Szrj       if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
348238fd1498Szrj         return -3;
348338fd1498Szrj       return 0;
348438fd1498Szrj 
348538fd1498Szrj     case RTX_OBJ:
348638fd1498Szrj       /* Complex expressions should be the first, so decrease priority
348738fd1498Szrj          of objects.  Prefer pointer objects over non pointer objects.  */
348838fd1498Szrj       if ((REG_P (op) && REG_POINTER (op))
348938fd1498Szrj 	  || (MEM_P (op) && MEM_POINTER (op)))
349038fd1498Szrj 	return -1;
349138fd1498Szrj       return -2;
349238fd1498Szrj 
349338fd1498Szrj     case RTX_COMM_ARITH:
349438fd1498Szrj       /* Prefer operands that are themselves commutative to be first.
349538fd1498Szrj          This helps to make things linear.  In particular,
349638fd1498Szrj          (and (and (reg) (reg)) (not (reg))) is canonical.  */
349738fd1498Szrj       return 4;
349838fd1498Szrj 
349938fd1498Szrj     case RTX_BIN_ARITH:
350038fd1498Szrj       /* If only one operand is a binary expression, it will be the first
350138fd1498Szrj          operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
350238fd1498Szrj          is canonical, although it will usually be further simplified.  */
350338fd1498Szrj       return 2;
350438fd1498Szrj 
350538fd1498Szrj     case RTX_UNARY:
350638fd1498Szrj       /* Then prefer NEG and NOT.  */
350738fd1498Szrj       if (code == NEG || code == NOT)
350838fd1498Szrj         return 1;
350938fd1498Szrj       /* FALLTHRU */
351038fd1498Szrj 
351138fd1498Szrj     default:
351238fd1498Szrj       return 0;
351338fd1498Szrj     }
351438fd1498Szrj }
351538fd1498Szrj 
351638fd1498Szrj /* Return 1 iff it is necessary to swap operands of commutative operation
351738fd1498Szrj    in order to canonicalize expression.  */
351838fd1498Szrj 
bool
swap_commutative_operands_p (rtx x, rtx y)
{
  /* Swap when Y has the stronger claim to the first-operand slot.  */
  return (commutative_operand_precedence (x)
	  < commutative_operand_precedence (y));
}
352538fd1498Szrj 
352638fd1498Szrj /* Return 1 if X is an autoincrement side effect and the register is
352738fd1498Szrj    not the stack pointer.  */
352838fd1498Szrj int
auto_inc_p(const_rtx x)352938fd1498Szrj auto_inc_p (const_rtx x)
353038fd1498Szrj {
353138fd1498Szrj   switch (GET_CODE (x))
353238fd1498Szrj     {
353338fd1498Szrj     case PRE_INC:
353438fd1498Szrj     case POST_INC:
353538fd1498Szrj     case PRE_DEC:
353638fd1498Szrj     case POST_DEC:
353738fd1498Szrj     case PRE_MODIFY:
353838fd1498Szrj     case POST_MODIFY:
353938fd1498Szrj       /* There are no REG_INC notes for SP.  */
354038fd1498Szrj       if (XEXP (x, 0) != stack_pointer_rtx)
354138fd1498Szrj 	return 1;
354238fd1498Szrj     default:
354338fd1498Szrj       break;
354438fd1498Szrj     }
354538fd1498Szrj   return 0;
354638fd1498Szrj }
354738fd1498Szrj 
354838fd1498Szrj /* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
354938fd1498Szrj int
loc_mentioned_in_p(rtx * loc,const_rtx in)355038fd1498Szrj loc_mentioned_in_p (rtx *loc, const_rtx in)
355138fd1498Szrj {
355238fd1498Szrj   enum rtx_code code;
355338fd1498Szrj   const char *fmt;
355438fd1498Szrj   int i, j;
355538fd1498Szrj 
355638fd1498Szrj   if (!in)
355738fd1498Szrj     return 0;
355838fd1498Szrj 
355938fd1498Szrj   code = GET_CODE (in);
356038fd1498Szrj   fmt = GET_RTX_FORMAT (code);
356138fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
356238fd1498Szrj     {
356338fd1498Szrj       if (fmt[i] == 'e')
356438fd1498Szrj 	{
356538fd1498Szrj 	  if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
356638fd1498Szrj 	    return 1;
356738fd1498Szrj 	}
356838fd1498Szrj       else if (fmt[i] == 'E')
356938fd1498Szrj 	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
357038fd1498Szrj 	  if (loc == &XVECEXP (in, i, j)
357138fd1498Szrj 	      || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
357238fd1498Szrj 	    return 1;
357338fd1498Szrj     }
357438fd1498Szrj   return 0;
357538fd1498Szrj }
357638fd1498Szrj 
357738fd1498Szrj /* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
357838fd1498Szrj    and SUBREG_BYTE, return the bit offset where the subreg begins
357938fd1498Szrj    (counting from the least significant bit of the operand).  */
358038fd1498Szrj 
poly_uint64
subreg_lsb_1 (machine_mode outer_mode,
	      machine_mode inner_mode,
	      poly_uint64 subreg_byte)
{
  poly_uint64 subreg_end, trailing_bytes, byte_pos;

  /* A paradoxical subreg begins at bit position 0.  */
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return 0;

  /* TRAILING_BYTES is the number of inner-value bytes after the end of
     the subreg.  */
  subreg_end = subreg_byte + GET_MODE_SIZE (outer_mode);
  trailing_bytes = GET_MODE_SIZE (inner_mode) - subreg_end;
  if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
    /* Fully big-endian: bytes are numbered from the msb, so the lsb
       distance is counted from the end of the inner value.  */
    byte_pos = trailing_bytes;
  else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
    /* Fully little-endian: the byte offset counts from the lsb
       directly.  */
    byte_pos = subreg_byte;
  else
    {
      /* When bytes and words have opposite endianness, we must be able
	 to split offsets into words and bytes at compile time.  */
      poly_uint64 leading_word_part
	= force_align_down (subreg_byte, UNITS_PER_WORD);
      poly_uint64 trailing_word_part
	= force_align_down (trailing_bytes, UNITS_PER_WORD);
      /* If the subreg crosses a word boundary ensure that
	 it also begins and ends on a word boundary.  */
      gcc_assert (known_le (subreg_end - leading_word_part,
			    (unsigned int) UNITS_PER_WORD)
		  || (known_eq (leading_word_part, subreg_byte)
		      && known_eq (trailing_word_part, trailing_bytes)));
      /* Count whole words in one direction and the byte remainder in
	 the other, per the mixed endianness.  */
      if (WORDS_BIG_ENDIAN)
	byte_pos = trailing_word_part + (subreg_byte - leading_word_part);
      else
	byte_pos = leading_word_part + (trailing_bytes - trailing_word_part);
    }

  return byte_pos * BITS_PER_UNIT;
}
362038fd1498Szrj 
362138fd1498Szrj /* Given a subreg X, return the bit offset where the subreg begins
362238fd1498Szrj    (counting from the least significant bit of the reg).  */
362338fd1498Szrj 
poly_uint64
subreg_lsb (const_rtx x)
{
  /* Delegate to subreg_lsb_1 with the subreg's own modes and byte.  */
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
		       SUBREG_BYTE (x));
}
363038fd1498Szrj 
363138fd1498Szrj /* Return the subreg byte offset for a subreg whose outer value has
363238fd1498Szrj    OUTER_BYTES bytes, whose inner value has INNER_BYTES bytes, and where
363338fd1498Szrj    there are LSB_SHIFT *bits* between the lsb of the outer value and the
363438fd1498Szrj    lsb of the inner value.  This is the inverse of the calculation
363538fd1498Szrj    performed by subreg_lsb_1 (which converts byte offsets to bit shifts).  */
363638fd1498Szrj 
poly_uint64
subreg_size_offset_from_lsb (poly_uint64 outer_bytes, poly_uint64 inner_bytes,
			     poly_uint64 lsb_shift)
{
  /* A paradoxical subreg begins at bit position 0.  */
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    {
      gcc_checking_assert (known_eq (lsb_shift, 0U));
      return 0;
    }

  /* LOWER_BYTES and UPPER_BYTES are the parts of the inner value below
     and above the outer value respectively.  */
  poly_uint64 lower_bytes = exact_div (lsb_shift, BITS_PER_UNIT);
  poly_uint64 upper_bytes = inner_bytes - (lower_bytes + outer_bytes);
  if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
    return upper_bytes;
  else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
    return lower_bytes;
  else
    {
      /* When bytes and words have opposite endianness, we must be able
	 to split offsets into words and bytes at compile time.  */
      poly_uint64 lower_word_part = force_align_down (lower_bytes,
						      UNITS_PER_WORD);
      poly_uint64 upper_word_part = force_align_down (upper_bytes,
						      UNITS_PER_WORD);
      if (WORDS_BIG_ENDIAN)
	return upper_word_part + (lower_bytes - lower_word_part);
      else
	return lower_word_part + (upper_bytes - upper_word_part);
    }
}
366938fd1498Szrj 
/* Fill in information about a subreg of a hard register.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   info   - Pointer to structure to fill in.

   Rather than considering one particular inner register (and thus one
   particular "outer" register) in isolation, this function really uses
   XREGNO as a model for a sequence of isomorphic hard registers.  Thus the
   function does not check whether adding INFO->offset to XREGNO gives
   a valid hard register; even if INFO->offset + XREGNO is out of range,
   there might be another register of the same type that is in range.
   Likewise it doesn't check whether targetm.hard_regno_mode_ok accepts
   the new register, since that can depend on things like whether the final
   register number is even or odd.  Callers that want to check whether
   this particular subreg can be replaced by a simple (reg ...) should
   use simplify_subreg_regno.  */

void
subreg_get_info (unsigned int xregno, machine_mode xmode,
		 poly_uint64 offset, machine_mode ymode,
		 struct subreg_info *info)
{
  unsigned int nregs_xmode, nregs_ymode;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  poly_uint64 xsize = GET_MODE_SIZE (xmode);
  poly_uint64 ysize = GET_MODE_SIZE (ymode);

  /* Whether info->representable_p has already been decided.  */
  bool rknown = false;

  /* If the register representation of a non-scalar mode has holes in it,
     we expect the scalar units to be concatenated together, with the holes
     distributed evenly among the scalar units.  Each scalar unit must occupy
     at least one register.  */
  if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
    {
      /* As a consequence, we must be dealing with a constant number of
	 scalars, and thus a constant offset and number of units.  */
      HOST_WIDE_INT coffset = offset.to_constant ();
      HOST_WIDE_INT cysize = ysize.to_constant ();
      nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
      unsigned int nunits = GET_MODE_NUNITS (xmode).to_constant ();
      scalar_mode xmode_unit = GET_MODE_INNER (xmode);
      gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
      gcc_assert (nregs_xmode
		  == (nunits
		      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
      gcc_assert (hard_regno_nregs (xregno, xmode)
		  == hard_regno_nregs (xregno, xmode_unit) * nunits);

      /* You can only ask for a SUBREG of a value with holes in the middle
	 if you don't cross the holes.  (Such a SUBREG should be done by
	 picking a different register class, or doing it in memory if
	 necessary.)  An example of a value with holes is XCmode on 32-bit
	 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
	 3 for each part, but in memory it's two 128-bit parts.
	 Padding is assumed to be at the end (not necessarily the 'high part')
	 of each unit.  */
      if ((coffset / GET_MODE_SIZE (xmode_unit) + 1 < nunits)
	  && (coffset / GET_MODE_SIZE (xmode_unit)
	      != ((coffset + cysize - 1) / GET_MODE_SIZE (xmode_unit))))
	{
	  info->representable_p = false;
	  rknown = true;
	}
    }
  else
    nregs_xmode = hard_regno_nregs (xregno, xmode);

  nregs_ymode = hard_regno_nregs (xregno, ymode);

  /* Subreg sizes must be ordered, so that we can tell whether they are
     partial, paradoxical or complete.  */
  gcc_checking_assert (ordered_p (xsize, ysize));

  /* Paradoxical subregs are otherwise valid.  */
  if (!rknown && known_eq (offset, 0U) && maybe_gt (ysize, xsize))
    {
      info->representable_p = true;
      /* If this is a big endian paradoxical subreg, which uses more
	 actual hard registers than the original register, we must
	 return a negative offset so that we find the proper highpart
	 of the register.

	 We assume that the ordering of registers within a multi-register
	 value has a consistent endianness: if bytes and register words
	 have different endianness, the hard registers that make up a
	 multi-register value must be at least word-sized.  */
      if (REG_WORDS_BIG_ENDIAN)
	info->offset = (int) nregs_xmode - (int) nregs_ymode;
      else
	info->offset = 0;
      info->nregs = nregs_ymode;
      return;
    }

  /* If registers store different numbers of bits in the different
     modes, we cannot generally form this subreg.  */
  poly_uint64 regsize_xmode, regsize_ymode;
  if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
      && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
      && multiple_p (xsize, nregs_xmode, &regsize_xmode)
      && multiple_p (ysize, nregs_ymode, &regsize_ymode))
    {
      if (!rknown
	  && ((nregs_ymode > 1 && maybe_gt (regsize_xmode, regsize_ymode))
	      || (nregs_xmode > 1 && maybe_gt (regsize_ymode, regsize_xmode))))
	{
	  info->representable_p = false;
	  if (!can_div_away_from_zero_p (ysize, regsize_xmode, &info->nregs)
	      || !can_div_trunc_p (offset, regsize_xmode, &info->offset))
	    /* Checked by validate_subreg.  We must know at compile time
	       which inner registers are being accessed.  */
	    gcc_unreachable ();
	  return;
	}
      /* It's not valid to extract a subreg of mode YMODE at OFFSET that
	 would go outside of XMODE.  */
      if (!rknown && maybe_gt (ysize + offset, xsize))
	{
	  info->representable_p = false;
	  info->nregs = nregs_ymode;
	  if (!can_div_trunc_p (offset, regsize_xmode, &info->offset))
	    /* Checked by validate_subreg.  We must know at compile time
	       which inner registers are being accessed.  */
	    gcc_unreachable ();
	  return;
	}
      /* Quick exit for the simple and common case of extracting whole
	 subregisters from a multiregister value.  */
      /* ??? It would be better to integrate this into the code below,
	 if we can generalize the concept enough and figure out how
	 odd-sized modes can coexist with the other weird cases we support.  */
      HOST_WIDE_INT count;
      if (!rknown
	  && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
	  && known_eq (regsize_xmode, regsize_ymode)
	  && constant_multiple_p (offset, regsize_ymode, &count))
	{
	  info->representable_p = true;
	  info->nregs = nregs_ymode;
	  info->offset = count;
	  gcc_assert (info->offset + info->nregs <= (int) nregs_xmode);
	  return;
	}
    }

  /* Lowpart subregs are otherwise valid.  */
  if (!rknown && known_eq (offset, subreg_lowpart_offset (ymode, xmode)))
    {
      info->representable_p = true;
      rknown = true;

      if (known_eq (offset, 0U) || nregs_xmode == nregs_ymode)
	{
	  info->offset = 0;
	  info->nregs = nregs_ymode;
	  return;
	}
    }

  /* Set NUM_BLOCKS to the number of independently-representable YMODE
     values there are in (reg:XMODE XREGNO).  We can view the register
     as consisting of this number of independent "blocks", where each
     block occupies NREGS_YMODE registers and contains exactly one
     representable YMODE value.  */
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);
  unsigned int num_blocks = nregs_xmode / nregs_ymode;

  /* Calculate the number of bytes in each block.  This must always
     be exact, otherwise we don't know how to verify the constraint.
     These conditions may be relaxed but subreg_regno_offset would
     need to be redesigned.  */
  poly_uint64 bytes_per_block = exact_div (xsize, num_blocks);

  /* Get the number of the first block that contains the subreg and the byte
     offset of the subreg from the start of that block.  */
  unsigned int block_number;
  poly_uint64 subblock_offset;
  if (!can_div_trunc_p (offset, bytes_per_block, &block_number,
			&subblock_offset))
    /* Checked by validate_subreg.  We must know at compile time which
       inner registers are being accessed.  */
    gcc_unreachable ();

  if (!rknown)
    {
      /* Only the lowpart of each block is representable.  */
      info->representable_p
	= known_eq (subblock_offset,
		    subreg_size_lowpart_offset (ysize, bytes_per_block));
      rknown = true;
    }

  /* We assume that the ordering of registers within a multi-register
     value has a consistent endianness: if bytes and register words
     have different endianness, the hard registers that make up a
     multi-register value must be at least word-sized.  */
  if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN)
    /* The block number we calculated above followed memory endianness.
       Convert it to register endianness by counting back from the end.
       (Note that, because of the assumption above, each block must be
       at least word-sized.)  */
    info->offset = (num_blocks - block_number - 1) * nregs_ymode;
  else
    info->offset = block_number * nregs_ymode;
  info->nregs = nregs_ymode;
}
388138fd1498Szrj 
388238fd1498Szrj /* This function returns the regno offset of a subreg expression.
388338fd1498Szrj    xregno - A regno of an inner hard subreg_reg (or what will become one).
388438fd1498Szrj    xmode  - The mode of xregno.
388538fd1498Szrj    offset - The byte offset.
388638fd1498Szrj    ymode  - The mode of a top level SUBREG (or what may become one).
388738fd1498Szrj    RETURN - The regno offset which would be used.  */
388838fd1498Szrj unsigned int
subreg_regno_offset(unsigned int xregno,machine_mode xmode,poly_uint64 offset,machine_mode ymode)388938fd1498Szrj subreg_regno_offset (unsigned int xregno, machine_mode xmode,
389038fd1498Szrj 		     poly_uint64 offset, machine_mode ymode)
389138fd1498Szrj {
389238fd1498Szrj   struct subreg_info info;
389338fd1498Szrj   subreg_get_info (xregno, xmode, offset, ymode, &info);
389438fd1498Szrj   return info.offset;
389538fd1498Szrj }
389638fd1498Szrj 
389738fd1498Szrj /* This function returns true when the offset is representable via
389838fd1498Szrj    subreg_offset in the given regno.
389938fd1498Szrj    xregno - A regno of an inner hard subreg_reg (or what will become one).
390038fd1498Szrj    xmode  - The mode of xregno.
390138fd1498Szrj    offset - The byte offset.
390238fd1498Szrj    ymode  - The mode of a top level SUBREG (or what may become one).
390338fd1498Szrj    RETURN - Whether the offset is representable.  */
390438fd1498Szrj bool
subreg_offset_representable_p(unsigned int xregno,machine_mode xmode,poly_uint64 offset,machine_mode ymode)390538fd1498Szrj subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
390638fd1498Szrj 			       poly_uint64 offset, machine_mode ymode)
390738fd1498Szrj {
390838fd1498Szrj   struct subreg_info info;
390938fd1498Szrj   subreg_get_info (xregno, xmode, offset, ymode, &info);
391038fd1498Szrj   return info.representable_p;
391138fd1498Szrj }
391238fd1498Szrj 
391338fd1498Szrj /* Return the number of a YMODE register to which
391438fd1498Szrj 
391538fd1498Szrj        (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
391638fd1498Szrj 
391738fd1498Szrj    can be simplified.  Return -1 if the subreg can't be simplified.
391838fd1498Szrj 
391938fd1498Szrj    XREGNO is a hard register number.  */
392038fd1498Szrj 
392138fd1498Szrj int
simplify_subreg_regno(unsigned int xregno,machine_mode xmode,poly_uint64 offset,machine_mode ymode)392238fd1498Szrj simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
392338fd1498Szrj 		       poly_uint64 offset, machine_mode ymode)
392438fd1498Szrj {
392538fd1498Szrj   struct subreg_info info;
392638fd1498Szrj   unsigned int yregno;
392738fd1498Szrj 
392838fd1498Szrj   /* Give the backend a chance to disallow the mode change.  */
392938fd1498Szrj   if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
393038fd1498Szrj       && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
393138fd1498Szrj       && !REG_CAN_CHANGE_MODE_P (xregno, xmode, ymode)
393238fd1498Szrj       /* We can use mode change in LRA for some transformations.  */
393338fd1498Szrj       && ! lra_in_progress)
393438fd1498Szrj     return -1;
393538fd1498Szrj 
393638fd1498Szrj   /* We shouldn't simplify stack-related registers.  */
393738fd1498Szrj   if ((!reload_completed || frame_pointer_needed)
393838fd1498Szrj       && xregno == FRAME_POINTER_REGNUM)
393938fd1498Szrj     return -1;
394038fd1498Szrj 
394138fd1498Szrj   if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
394238fd1498Szrj       && xregno == ARG_POINTER_REGNUM)
394338fd1498Szrj     return -1;
394438fd1498Szrj 
394538fd1498Szrj   if (xregno == STACK_POINTER_REGNUM
394638fd1498Szrj       /* We should convert hard stack register in LRA if it is
394738fd1498Szrj 	 possible.  */
394838fd1498Szrj       && ! lra_in_progress)
394938fd1498Szrj     return -1;
395038fd1498Szrj 
395138fd1498Szrj   /* Try to get the register offset.  */
395238fd1498Szrj   subreg_get_info (xregno, xmode, offset, ymode, &info);
395338fd1498Szrj   if (!info.representable_p)
395438fd1498Szrj     return -1;
395538fd1498Szrj 
395638fd1498Szrj   /* Make sure that the offsetted register value is in range.  */
395738fd1498Szrj   yregno = xregno + info.offset;
395838fd1498Szrj   if (!HARD_REGISTER_NUM_P (yregno))
395938fd1498Szrj     return -1;
396038fd1498Szrj 
396138fd1498Szrj   /* See whether (reg:YMODE YREGNO) is valid.
396238fd1498Szrj 
396338fd1498Szrj      ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
396438fd1498Szrj      This is a kludge to work around how complex FP arguments are passed
396538fd1498Szrj      on IA-64 and should be fixed.  See PR target/49226.  */
396638fd1498Szrj   if (!targetm.hard_regno_mode_ok (yregno, ymode)
396738fd1498Szrj       && targetm.hard_regno_mode_ok (xregno, xmode))
396838fd1498Szrj     return -1;
396938fd1498Szrj 
397038fd1498Szrj   return (int) yregno;
397138fd1498Szrj }
397238fd1498Szrj 
397338fd1498Szrj /* Return the final regno that a subreg expression refers to.  */
397438fd1498Szrj unsigned int
subreg_regno(const_rtx x)397538fd1498Szrj subreg_regno (const_rtx x)
397638fd1498Szrj {
397738fd1498Szrj   unsigned int ret;
397838fd1498Szrj   rtx subreg = SUBREG_REG (x);
397938fd1498Szrj   int regno = REGNO (subreg);
398038fd1498Szrj 
398138fd1498Szrj   ret = regno + subreg_regno_offset (regno,
398238fd1498Szrj 				     GET_MODE (subreg),
398338fd1498Szrj 				     SUBREG_BYTE (x),
398438fd1498Szrj 				     GET_MODE (x));
398538fd1498Szrj   return ret;
398638fd1498Szrj 
398738fd1498Szrj }
398838fd1498Szrj 
398938fd1498Szrj /* Return the number of registers that a subreg expression refers
399038fd1498Szrj    to.  */
399138fd1498Szrj unsigned int
subreg_nregs(const_rtx x)399238fd1498Szrj subreg_nregs (const_rtx x)
399338fd1498Szrj {
399438fd1498Szrj   return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
399538fd1498Szrj }
399638fd1498Szrj 
399738fd1498Szrj /* Return the number of registers that a subreg REG with REGNO
399838fd1498Szrj    expression refers to.  This is a copy of the rtlanal.c:subreg_nregs
399938fd1498Szrj    changed so that the regno can be passed in. */
400038fd1498Szrj 
400138fd1498Szrj unsigned int
subreg_nregs_with_regno(unsigned int regno,const_rtx x)400238fd1498Szrj subreg_nregs_with_regno (unsigned int regno, const_rtx x)
400338fd1498Szrj {
400438fd1498Szrj   struct subreg_info info;
400538fd1498Szrj   rtx subreg = SUBREG_REG (x);
400638fd1498Szrj 
400738fd1498Szrj   subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
400838fd1498Szrj 		   &info);
400938fd1498Szrj   return info.nregs;
401038fd1498Szrj }
401138fd1498Szrj 
/* Data shared between find_first_parameter_load and its note_stores
   callback parms_set.  */
struct parms_set_data
{
  int nregs;		/* Number of parameter registers not yet seen set.  */
  HARD_REG_SET regs;	/* Parameter registers still being searched for.  */
};
401738fd1498Szrj 
401838fd1498Szrj /* Helper function for noticing stores to parameter registers.  */
401938fd1498Szrj static void
parms_set(rtx x,const_rtx pat ATTRIBUTE_UNUSED,void * data)402038fd1498Szrj parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
402138fd1498Szrj {
402238fd1498Szrj   struct parms_set_data *const d = (struct parms_set_data *) data;
402338fd1498Szrj   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
402438fd1498Szrj       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
402538fd1498Szrj     {
402638fd1498Szrj       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
402738fd1498Szrj       d->nregs--;
402838fd1498Szrj     }
402938fd1498Szrj }
403038fd1498Szrj 
/* Look backward for first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  Returns the earliest insn that sets a
   parameter register of CALL_INSN, or CALL_INSN itself if none
   is found before the search stops.  */
rtx_insn *
find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
{
  struct parms_set_data parm;
  rtx p;
  rtx_insn *before, *first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  /* Walk the (use ...) entries of CALL_INSN_FUNCTION_USAGE to find the
     hard registers that carry arguments for this call.  */
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& REG_P (XEXP (XEXP (p, 0), 0))
	&& !STATIC_CHAIN_REG_P (XEXP (XEXP (p, 0), 0)))
      {
	gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

	/* We only care about registers which can hold function
	   arguments.  */
	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
	  continue;

	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
	parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (CALL_P (before))
	break;

      /* Our caller needs either ensure that we will find all sets
         (in case code has not been optimized yet), or take care
         for possible labels in a way by setting boundary to preceding
         CODE_LABEL.  */
      if (LABEL_P (before))
	{
	  gcc_assert (before == boundary);
	  break;
	}

      if (INSN_P (before))
	{
	  int nregs_old = parm.nregs;
	  /* parms_set clears bits for any parameter registers this
	     insn sets and decrements parm.nregs accordingly.  */
	  note_stores (PATTERN (before), parms_set, &parm);
	  /* If we found something that did not set a parameter reg,
	     we're done.  Do not keep going, as that might result
	     in hoisting an insn before the setting of a pseudo
	     that is used by the hoisted insn. */
	  if (nregs_old != parm.nregs)
	    first_set = before;
	  else
	    break;
	}
    }
  return first_set;
}
410238fd1498Szrj 
410338fd1498Szrj /* Return true if we should avoid inserting code between INSN and preceding
410438fd1498Szrj    call instruction.  */
410538fd1498Szrj 
410638fd1498Szrj bool
keep_with_call_p(const rtx_insn * insn)410738fd1498Szrj keep_with_call_p (const rtx_insn *insn)
410838fd1498Szrj {
410938fd1498Szrj   rtx set;
411038fd1498Szrj 
411138fd1498Szrj   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
411238fd1498Szrj     {
411338fd1498Szrj       if (REG_P (SET_DEST (set))
411438fd1498Szrj 	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
411538fd1498Szrj 	  && fixed_regs[REGNO (SET_DEST (set))]
411638fd1498Szrj 	  && general_operand (SET_SRC (set), VOIDmode))
411738fd1498Szrj 	return true;
411838fd1498Szrj       if (REG_P (SET_SRC (set))
411938fd1498Szrj 	  && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
412038fd1498Szrj 	  && REG_P (SET_DEST (set))
412138fd1498Szrj 	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
412238fd1498Szrj 	return true;
412338fd1498Szrj       /* There may be a stack pop just after the call and before the store
412438fd1498Szrj 	 of the return register.  Search for the actual store when deciding
412538fd1498Szrj 	 if we can break or not.  */
412638fd1498Szrj       if (SET_DEST (set) == stack_pointer_rtx)
412738fd1498Szrj 	{
412838fd1498Szrj 	  /* This CONST_CAST is okay because next_nonnote_insn just
412938fd1498Szrj 	     returns its argument and we assign it to a const_rtx
413038fd1498Szrj 	     variable.  */
413138fd1498Szrj 	  const rtx_insn *i2
413238fd1498Szrj 	    = next_nonnote_insn (const_cast<rtx_insn *> (insn));
413338fd1498Szrj 	  if (i2 && keep_with_call_p (i2))
413438fd1498Szrj 	    return true;
413538fd1498Szrj 	}
413638fd1498Szrj     }
413738fd1498Szrj   return false;
413838fd1498Szrj }
413938fd1498Szrj 
414038fd1498Szrj /* Return true if LABEL is a target of JUMP_INSN.  This applies only
414138fd1498Szrj    to non-complex jumps.  That is, direct unconditional, conditional,
414238fd1498Szrj    and tablejumps, but not computed jumps or returns.  It also does
414338fd1498Szrj    not apply to the fallthru case of a conditional jump.  */
414438fd1498Szrj 
414538fd1498Szrj bool
label_is_jump_target_p(const_rtx label,const rtx_insn * jump_insn)414638fd1498Szrj label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
414738fd1498Szrj {
414838fd1498Szrj   rtx tmp = JUMP_LABEL (jump_insn);
414938fd1498Szrj   rtx_jump_table_data *table;
415038fd1498Szrj 
415138fd1498Szrj   if (label == tmp)
415238fd1498Szrj     return true;
415338fd1498Szrj 
415438fd1498Szrj   if (tablejump_p (jump_insn, NULL, &table))
415538fd1498Szrj     {
415638fd1498Szrj       rtvec vec = table->get_labels ();
415738fd1498Szrj       int i, veclen = GET_NUM_ELEM (vec);
415838fd1498Szrj 
415938fd1498Szrj       for (i = 0; i < veclen; ++i)
416038fd1498Szrj 	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
416138fd1498Szrj 	  return true;
416238fd1498Szrj     }
416338fd1498Szrj 
416438fd1498Szrj   if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
416538fd1498Szrj     return true;
416638fd1498Szrj 
416738fd1498Szrj   return false;
416838fd1498Szrj }
416938fd1498Szrj 
417038fd1498Szrj 
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.

   X appears as operand OPNO in an expression with code OUTER_CODE.
   SPEED specifies whether costs optimized for speed or size should
   be returned.  */

int
rtx_cost (rtx x, machine_mode mode, enum rtx_code outer_code,
	  int opno, bool speed)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;
  int factor;

  if (x == 0)
    return 0;

  /* Prefer X's own mode over the caller-supplied one when X has one.  */
  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  /* A size N times larger than UNITS_PER_WORD likely needs N times as
     many insns, taking N times as long.  */
  factor = estimated_poly_value (GET_MODE_SIZE (mode)) / UNITS_PER_WORD;
  if (factor == 0)
    factor = 1;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Multiplication has time-complexity O(N*N), where N is the
	 number of units (translated from digits) when using
	 schoolbook long multiplication.  */
      total = factor * factor * COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* Similarly, complexity for schoolbook long division.  */
      total = factor * factor * COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in combine.c as a marker.  */
      total = 0;
      break;
    case SET:
      /* A SET doesn't have a mode, so let's look at the SET_DEST to get
	 the mode for the factor.  */
      mode = GET_MODE (SET_DEST (x));
      factor = estimated_poly_value (GET_MODE_SIZE (mode)) / UNITS_PER_WORD;
      if (factor == 0)
	factor = 1;
      /* FALLTHRU */
    default:
      total = factor * COSTS_N_INSNS (1);
    }

  /* Now let the target adjust or override the default, and handle the
     cases where the cost is determined by mode-tying.  */
  switch (code)
    {
    case REG:
      /* Registers are free.  */
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (!targetm.modes_tieable_p (mode, GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2 + factor);
      break;

    case TRUNCATE:
      /* A truncation between tieable modes is a no-op.  */
      if (targetm.modes_tieable_p (mode, GET_MODE (XEXP (x, 0))))
	{
	  total = 0;
	  break;
	}
      /* FALLTHRU */
    default:
      /* If the target claims the full cost, we're done; otherwise TOTAL
	 keeps the default and the operands are added below.  */
      if (targetm.rtx_costs (x, mode, outer_code, opno, &total, speed))
	return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), mode, code, i, speed);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), mode, code, i, speed);

  return total;
}
427638fd1498Szrj 
427738fd1498Szrj /* Fill in the structure C with information about both speed and size rtx
427838fd1498Szrj    costs for X, which is operand OPNO in an expression with code OUTER.  */
427938fd1498Szrj 
428038fd1498Szrj void
get_full_rtx_cost(rtx x,machine_mode mode,enum rtx_code outer,int opno,struct full_rtx_costs * c)428138fd1498Szrj get_full_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno,
428238fd1498Szrj 		   struct full_rtx_costs *c)
428338fd1498Szrj {
428438fd1498Szrj   c->speed = rtx_cost (x, mode, outer, opno, true);
428538fd1498Szrj   c->size = rtx_cost (x, mode, outer, opno, false);
428638fd1498Szrj }
428738fd1498Szrj 
428838fd1498Szrj 
428938fd1498Szrj /* Return cost of address expression X.
429038fd1498Szrj    Expect that X is properly formed address reference.
429138fd1498Szrj 
429238fd1498Szrj    SPEED parameter specify whether costs optimized for speed or size should
429338fd1498Szrj    be returned.  */
429438fd1498Szrj 
429538fd1498Szrj int
address_cost(rtx x,machine_mode mode,addr_space_t as,bool speed)429638fd1498Szrj address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
429738fd1498Szrj {
429838fd1498Szrj   /* We may be asked for cost of various unusual addresses, such as operands
429938fd1498Szrj      of push instruction.  It is not worthwhile to complicate writing
430038fd1498Szrj      of the target hook by such cases.  */
430138fd1498Szrj 
430238fd1498Szrj   if (!memory_address_addr_space_p (mode, x, as))
430338fd1498Szrj     return 1000;
430438fd1498Szrj 
430538fd1498Szrj   return targetm.address_cost (x, mode, as, speed);
430638fd1498Szrj }
430738fd1498Szrj 
430838fd1498Szrj /* If the target doesn't override, compute the cost as with arithmetic.  */
430938fd1498Szrj 
int
default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
{
  /* Price the address as ordinary arithmetic appearing inside a MEM;
     the address-space argument is ignored by this default.  */
  return rtx_cost (x, Pmode, MEM, 0, speed);
}
431538fd1498Szrj 
431638fd1498Szrj 
431738fd1498Szrj unsigned HOST_WIDE_INT
nonzero_bits(const_rtx x,machine_mode mode)431838fd1498Szrj nonzero_bits (const_rtx x, machine_mode mode)
431938fd1498Szrj {
432038fd1498Szrj   if (mode == VOIDmode)
432138fd1498Szrj     mode = GET_MODE (x);
432238fd1498Szrj   scalar_int_mode int_mode;
432338fd1498Szrj   if (!is_a <scalar_int_mode> (mode, &int_mode))
432438fd1498Szrj     return GET_MODE_MASK (mode);
432538fd1498Szrj   return cached_nonzero_bits (x, int_mode, NULL_RTX, VOIDmode, 0);
432638fd1498Szrj }
432738fd1498Szrj 
432838fd1498Szrj unsigned int
num_sign_bit_copies(const_rtx x,machine_mode mode)432938fd1498Szrj num_sign_bit_copies (const_rtx x, machine_mode mode)
433038fd1498Szrj {
433138fd1498Szrj   if (mode == VOIDmode)
433238fd1498Szrj     mode = GET_MODE (x);
433338fd1498Szrj   scalar_int_mode int_mode;
433438fd1498Szrj   if (!is_a <scalar_int_mode> (mode, &int_mode))
433538fd1498Szrj     return 1;
433638fd1498Szrj   return cached_num_sign_bit_copies (x, int_mode, NULL_RTX, VOIDmode, 0);
433738fd1498Szrj }
433838fd1498Szrj 
433938fd1498Szrj /* Return true if nonzero_bits1 might recurse into both operands
434038fd1498Szrj    of X.  */
434138fd1498Szrj 
434238fd1498Szrj static inline bool
nonzero_bits_binary_arith_p(const_rtx x)434338fd1498Szrj nonzero_bits_binary_arith_p (const_rtx x)
434438fd1498Szrj {
434538fd1498Szrj   if (!ARITHMETIC_P (x))
434638fd1498Szrj     return false;
434738fd1498Szrj   switch (GET_CODE (x))
434838fd1498Szrj     {
434938fd1498Szrj     case AND:
435038fd1498Szrj     case XOR:
435138fd1498Szrj     case IOR:
435238fd1498Szrj     case UMIN:
435338fd1498Szrj     case UMAX:
435438fd1498Szrj     case SMIN:
435538fd1498Szrj     case SMAX:
435638fd1498Szrj     case PLUS:
435738fd1498Szrj     case MINUS:
435838fd1498Szrj     case MULT:
435938fd1498Szrj     case DIV:
436038fd1498Szrj     case UDIV:
436138fd1498Szrj     case MOD:
436238fd1498Szrj     case UMOD:
436338fd1498Szrj       return true;
436438fd1498Szrj     default:
436538fd1498Szrj       return false;
436638fd1498Szrj     }
436738fd1498Szrj }
436838fd1498Szrj 
436938fd1498Szrj /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
437038fd1498Szrj    It avoids exponential behavior in nonzero_bits1 when X has
437138fd1498Szrj    identical subexpressions on the first or the second level.  */
437238fd1498Szrj 
static unsigned HOST_WIDE_INT
cached_nonzero_bits (const_rtx x, scalar_int_mode mode, const_rtx known_x,
		     machine_mode known_mode,
		     unsigned HOST_WIDE_INT known_ret)
{
  /* If we are being asked about exactly the subexpression whose value
     was precomputed by a caller, reuse that value instead of recursing.  */
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (nonzero_bits_binary_arith_p (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level: (OP Y Y) — evaluate Y only once.  */
      if (x0 == x1)
	return nonzero_bits1 (x, mode, x0, mode,
			      cached_nonzero_bits (x0, mode, known_x,
						   known_mode, known_ret));

      /* Check the second level: one operand is shared with a
	 sub-operand of the other, e.g. (OP (OP2 Y Z) Y).  */
      if (nonzero_bits_binary_arith_p (x0)
	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
	return nonzero_bits1 (x, mode, x1, mode,
			      cached_nonzero_bits (x1, mode, known_x,
						   known_mode, known_ret));

      if (nonzero_bits_binary_arith_p (x1)
	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
	return nonzero_bits1 (x, mode, x0, mode,
			      cached_nonzero_bits (x0, mode, known_x,
						   known_mode, known_ret));
    }

  /* No sharing found; do the real work.  */
  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}
441238fd1498Szrj 
441338fd1498Szrj /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
441438fd1498Szrj    We don't let nonzero_bits recur into num_sign_bit_copies, because that
441538fd1498Szrj    is less useful.  We can't allow both, because that results in exponential
441638fd1498Szrj    run time recursion.  There is a nullstone testcase that triggered
441738fd1498Szrj    this.  This macro avoids accidental uses of num_sign_bit_copies.  */
441838fd1498Szrj #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
441938fd1498Szrj 
442038fd1498Szrj /* Given an expression, X, compute which bits in X can be nonzero.
442138fd1498Szrj    We don't care about bits outside of those defined in MODE.
442238fd1498Szrj 
442338fd1498Szrj    For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
442438fd1498Szrj    an arithmetic operation, we can do better.  */
442538fd1498Szrj 
static unsigned HOST_WIDE_INT
nonzero_bits1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
	       machine_mode known_mode,
	       unsigned HOST_WIDE_INT known_ret)
{
  /* Start from the most conservative answer: every bit of MODE.  */
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code = GET_CODE (x);
  machine_mode inner_mode;
  unsigned int inner_width;
  scalar_int_mode xmode;

  unsigned int mode_width = GET_MODE_PRECISION (mode);

  if (CONST_INT_P (x))
    {
      /* On targets where short immediates are sign-extended, a positive
	 constant with its mode sign bit set is really stored with all
	 higher bits set as well.  */
      if (SHORT_IMMEDIATES_SIGN_EXTEND
	  && INTVAL (x) > 0
	  && mode_width < BITS_PER_WORD
	  && (UINTVAL (x) & (HOST_WIDE_INT_1U << (mode_width - 1))) != 0)
	return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);

      return UINTVAL (x);
    }

  /* Non-scalar-integer X: nothing useful to say beyond the mode mask.  */
  if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
    return nonzero;
  unsigned int xmode_width = GET_MODE_PRECISION (xmode);

  /* If X is wider than MODE, use its mode instead.  */
  if (xmode_width > mode_width)
    {
      mode = xmode;
      nonzero = GET_MODE_MASK (mode);
      mode_width = xmode_width;
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the object
     might be nonzero in its own mode, taking into account the fact that, on
     CISC machines, accessing an object in a wider mode generally causes the
     high-order bits to become undefined, so they are not known to be zero.
     We extend this reasoning to RISC machines for operations that might not
     operate on the full registers.  */
  if (mode_width > xmode_width
      && xmode_width <= BITS_PER_WORD
      && xmode_width <= HOST_BITS_PER_WIDE_INT
      && !(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
    {
      nonzero &= cached_nonzero_bits (x, xmode,
				      known_x, known_mode, known_ret);
      /* Bits above X's own mode must be assumed possibly nonzero.  */
      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode);
      return nonzero;
    }

  /* Please keep nonzero_bits_binary_arith_p above in sync with
     the code in the switch below.  */
  switch (code)
    {
    case REG:
#if defined(POINTERS_EXTEND_UNSIGNED)
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be zero.  */
      /* As we do not know which address space the pointer is referring to,
	 we can do this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (target_default_pointer_address_modes_p ()
	  && POINTERS_EXTEND_UNSIGNED
	  && xmode == Pmode
	  && REG_POINTER (x)
	  && !targetm.have_ptr_extend ())
	nonzero &= GET_MODE_MASK (ptr_mode);
#endif

      /* Include declared information about alignment of pointers.  */
      /* ??? We don't properly preserve REG_POINTER changes across
	 pointer-to-integer casts, so we can't trust it except for
	 things that we know must be pointers.  See execute/960116-1.c.  */
      if ((x == stack_pointer_rtx
	   || x == frame_pointer_rtx
	   || x == arg_pointer_rtx)
	  && REGNO_POINTER_ALIGN (REGNO (x)))
	{
	  unsigned HOST_WIDE_INT alignment
	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
	  /* If PUSH_ROUNDING is defined, it is possible for the
	     stack to be momentarily aligned only to that amount,
	     so we pick the least alignment.  */
	  if (x == stack_pointer_rtx && PUSH_ARGS)
	    {
	      poly_uint64 rounded_1 = PUSH_ROUNDING (poly_int64 (1));
	      alignment = MIN (known_alignment (rounded_1), alignment);
	    }
#endif

	  /* Aligned pointers have their low log2(alignment) bits zero.  */
	  nonzero &= ~(alignment - 1);
	}

      {
	/* Let the back end (e.g. combine's reg-value tracking) refine the
	   answer or point us at an equivalent expression to analyze.  */
	unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
	rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, xmode, mode,
						  &nonzero_for_hook);

	if (new_rtx)
	  nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
						   known_mode, known_ret);

	return nonzero_for_hook;
      }

    case MEM:
      /* In many, if not most, RISC machines, reading a byte from memory
	 zeros the rest of the register.  Noticing that fact saves a lot
	 of extra zero-extends.  */
      if (load_extend_op (xmode) == ZERO_EXTEND)
	nonzero &= GET_MODE_MASK (xmode);
      break;

    case EQ:  case NE:
    case UNEQ:  case LTGT:
    case GT:  case GTU:  case UNGT:
    case LT:  case LTU:  case UNLT:
    case GE:  case GEU:  case UNGE:
    case LE:  case LEU:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If this produces an integer result, we know which bits are set.
	 Code here used to clear bits outside the mode of X, but that is
	 now done above.  */
      /* Mind that MODE is the mode the caller wants to look at this
	 operation in, and not the actual operation mode.  We can wind
	 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
	 that describes the results of a vector compare.  */
      if (GET_MODE_CLASS (xmode) == MODE_INT
	  && mode_width <= HOST_BITS_PER_WIDE_INT)
	nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
	nonzero = 1;
#endif

      /* Bits of MODE outside X's own mode may be anything.  */
      if (xmode_width < mode_width)
	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
	nonzero = 1;
#endif
      break;

    case TRUNCATE:
      /* Truncation keeps only the low bits of the operand.  */
      nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
				       known_x, known_mode, known_ret)
		  & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      /* Only the bits of the (narrower) operand can be nonzero.  */
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
				      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
	 Otherwise, show all the bits in the outer mode but not the inner
	 may be nonzero.  */
      inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
				      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	{
	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
	  if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
	    inner_nz |= (GET_MODE_MASK (mode)
			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
	}

      nonzero &= inner_nz;
      break;

    case AND:
      /* AND can only keep bits that are nonzero in both operands.  */
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
				       known_x, known_mode, known_ret)
      		 & cached_nonzero_bits (XEXP (x, 1), mode,
					known_x, known_mode, known_ret);
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      /* These can set at most the union of their operands' bits.  */
      {
	unsigned HOST_WIDE_INT nonzero0
	   = cached_nonzero_bits (XEXP (x, 0), mode,
				  known_x, known_mode, known_ret);

	/* Don't call nonzero_bits for the second time if it cannot change
	   anything.  */
	if ((nonzero & nonzero0) != nonzero)
	  nonzero &= nonzero0
      		     | cached_nonzero_bits (XEXP (x, 1), mode,
					    known_x, known_mode, known_ret);
      }
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
	 high- and low-order zero bits of these operations.  We start by
	 computing the width (position of the highest-order nonzero bit)
	 and the number of low-order zero bits for each value.  */
      {
	unsigned HOST_WIDE_INT nz0
	  = cached_nonzero_bits (XEXP (x, 0), mode,
				 known_x, known_mode, known_ret);
	unsigned HOST_WIDE_INT nz1
	  = cached_nonzero_bits (XEXP (x, 1), mode,
				 known_x, known_mode, known_ret);
	int sign_index = xmode_width - 1;
	int width0 = floor_log2 (nz0) + 1;
	int width1 = floor_log2 (nz1) + 1;
	int low0 = ctz_or_zero (nz0);
	int low1 = ctz_or_zero (nz1);
	unsigned HOST_WIDE_INT op0_maybe_minusp
	  = nz0 & (HOST_WIDE_INT_1U << sign_index);
	unsigned HOST_WIDE_INT op1_maybe_minusp
	  = nz1 & (HOST_WIDE_INT_1U << sign_index);
	unsigned int result_width = mode_width;
	int result_low = 0;

	switch (code)
	  {
	  case PLUS:
	    /* A sum can carry one bit past the wider operand.  */
	    result_width = MAX (width0, width1) + 1;
	    result_low = MIN (low0, low1);
	    break;
	  case MINUS:
	    result_low = MIN (low0, low1);
	    break;
	  case MULT:
	    result_width = width0 + width1;
	    result_low = low0 + low1;
	    break;
	  case DIV:
	    if (width1 == 0)
	      break;
	    /* Signed division only bounds the width when neither operand
	       can be negative.  */
	    if (!op0_maybe_minusp && !op1_maybe_minusp)
	      result_width = width0;
	    break;
	  case UDIV:
	    if (width1 == 0)
	      break;
	    result_width = width0;
	    break;
	  case MOD:
	    if (width1 == 0)
	      break;
	    if (!op0_maybe_minusp && !op1_maybe_minusp)
	      result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  case UMOD:
	    if (width1 == 0)
	      break;
	    result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  default:
	    gcc_unreachable ();
	  }

	if (result_width < mode_width)
	  nonzero &= (HOST_WIDE_INT_1U << result_width) - 1;

	if (result_low > 0)
	  nonzero &= ~((HOST_WIDE_INT_1U << result_low) - 1);
      }
      break;

    case ZERO_EXTRACT:
      /* Operand 1 is the number of bits extracted; only that many
	 low-order bits of the result can be nonzero.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	nonzero &= (HOST_WIDE_INT_1U << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
	 been zero-extended, we know that at least the high-order bits
	 are zero, though others might be too.  */
      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
	nonzero = GET_MODE_MASK (xmode)
		  & cached_nonzero_bits (SUBREG_REG (x), xmode,
					 known_x, known_mode, known_ret);

      /* If the inner mode is a single word for both the host and target
	 machines, we can compute this from which bits of the inner
	 object might be nonzero.  */
      inner_mode = GET_MODE (SUBREG_REG (x));
      if (GET_MODE_PRECISION (inner_mode).is_constant (&inner_width)
	  && inner_width <= BITS_PER_WORD
	  && inner_width <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
					  known_x, known_mode, known_ret);

          /* On a typical CISC machine, accessing an object in a wider mode
	     causes the high-order bits to become undefined.  So they are
	     not known to be zero.

	     On a typical RISC machine, we only have to worry about the way
	     loads are extended.  Otherwise, if we get a reload for the inner
	     part, it may be loaded from the stack, and then we may lose all
	     the zero bits that existed before the store to the stack.  */
	  rtx_code extend_op;
	  if ((!WORD_REGISTER_OPERATIONS
	       || ((extend_op = load_extend_op (inner_mode)) == SIGN_EXTEND
		   ? val_signbit_known_set_p (inner_mode, nonzero)
		   : extend_op != ZERO_EXTEND)
	       || (!MEM_P (SUBREG_REG (x)) && !REG_P (SUBREG_REG (x))))
	      && xmode_width > inner_width)
	    nonzero
	      |= (GET_MODE_MASK (GET_MODE (x)) & ~GET_MODE_MASK (inner_mode));
	}
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* The nonzero bits are in two classes: any bits within MODE
	 that aren't in xmode are always significant.  The rest of the
	 nonzero bits are those that are significant in the operand of
	 the shift when shifted the appropriate number of bits.  This
	 shows that high-order bits are cleared by the right shift and
	 low-order bits by left shifts.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) < xmode_width)
	{
	  int count = INTVAL (XEXP (x, 1));
	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (xmode);
	  unsigned HOST_WIDE_INT op_nonzero
	    = cached_nonzero_bits (XEXP (x, 0), mode,
				   known_x, known_mode, known_ret);
	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
	  unsigned HOST_WIDE_INT outer = 0;

	  if (mode_width > xmode_width)
	    outer = (op_nonzero & nonzero & ~mode_mask);

	  switch (code)
	    {
	    case ASHIFT:
	      inner <<= count;
	      break;

	    case LSHIFTRT:
	      inner >>= count;
	      break;

	    case ASHIFTRT:
	      inner >>= count;

	      /* If the sign bit may have been nonzero before the shift, we
		 need to mark all the places it could have been copied to
		 by the shift as possibly nonzero.  */
	      if (inner & (HOST_WIDE_INT_1U << (xmode_width - 1 - count)))
		inner |= (((HOST_WIDE_INT_1U << count) - 1)
			  << (xmode_width - count));
	      break;

	    case ROTATE:
	      inner = (inner << (count % xmode_width)
		       | (inner >> (xmode_width - (count % xmode_width))))
		      & mode_mask;
	      break;

	    case ROTATERT:
	      inner = (inner >> (count % xmode_width)
		       | (inner << (xmode_width - (count % xmode_width))))
		      & mode_mask;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  nonzero &= (outer | inner);
	}
      break;

    case FFS:
    case POPCOUNT:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
      break;

    case CLZ:
      /* If CLZ has a known value at zero, then the nonzero bits are
	 that value, plus the number of bits in the mode minus one.  */
      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
	nonzero
	  |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
      else
	nonzero = -1;
      break;

    case CTZ:
      /* If CTZ has a known value at zero, then the nonzero bits are
	 that value, plus the number of bits in the mode minus one.  */
      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
	nonzero
	  |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
      else
	nonzero = -1;
      break;

    case CLRSB:
      /* This is at most the number of bits in the mode minus 1.  */
      nonzero = (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
      break;

    case PARITY:
      /* Parity is always 0 or 1.  */
      nonzero = 1;
      break;

    case IF_THEN_ELSE:
      /* The result is one of the two arms, so at most the union of
	 their nonzero bits.  */
      {
	unsigned HOST_WIDE_INT nonzero_true
	  = cached_nonzero_bits (XEXP (x, 1), mode,
				 known_x, known_mode, known_ret);

	/* Don't call nonzero_bits for the second time if it cannot change
	   anything.  */
	if ((nonzero & nonzero_true) != nonzero)
	  nonzero &= nonzero_true
      		     | cached_nonzero_bits (XEXP (x, 2), mode,
					    known_x, known_mode, known_ret);
      }
      break;

    default:
      break;
    }

  return nonzero;
}
489038fd1498Szrj 
489138fd1498Szrj /* See the macro definition above.  */
489238fd1498Szrj #undef cached_num_sign_bit_copies
489338fd1498Szrj 
489438fd1498Szrj 
489538fd1498Szrj /* Return true if num_sign_bit_copies1 might recurse into both operands
489638fd1498Szrj    of X.  */
489738fd1498Szrj 
489838fd1498Szrj static inline bool
num_sign_bit_copies_binary_arith_p(const_rtx x)489938fd1498Szrj num_sign_bit_copies_binary_arith_p (const_rtx x)
490038fd1498Szrj {
490138fd1498Szrj   if (!ARITHMETIC_P (x))
490238fd1498Szrj     return false;
490338fd1498Szrj   switch (GET_CODE (x))
490438fd1498Szrj     {
490538fd1498Szrj     case IOR:
490638fd1498Szrj     case AND:
490738fd1498Szrj     case XOR:
490838fd1498Szrj     case SMIN:
490938fd1498Szrj     case SMAX:
491038fd1498Szrj     case UMIN:
491138fd1498Szrj     case UMAX:
491238fd1498Szrj     case PLUS:
491338fd1498Szrj     case MINUS:
491438fd1498Szrj     case MULT:
491538fd1498Szrj       return true;
491638fd1498Szrj     default:
491738fd1498Szrj       return false;
491838fd1498Szrj     }
491938fd1498Szrj }
492038fd1498Szrj 
492138fd1498Szrj /* The function cached_num_sign_bit_copies is a wrapper around
492238fd1498Szrj    num_sign_bit_copies1.  It avoids exponential behavior in
492338fd1498Szrj    num_sign_bit_copies1 when X has identical subexpressions on the
492438fd1498Szrj    first or the second level.  */
492538fd1498Szrj 
static unsigned int
cached_num_sign_bit_copies (const_rtx x, scalar_int_mode mode,
			    const_rtx known_x, machine_mode known_mode,
			    unsigned int known_ret)
{
  /* If we are being asked about exactly the subexpression whose value
     was precomputed by a caller, reuse that value instead of recursing.  */
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
     the precomputed value for the subexpression as KNOWN_RET.  */

  if (num_sign_bit_copies_binary_arith_p (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level: (OP Y Y) — evaluate Y only once.  */
      if (x0 == x1)
	return
	  num_sign_bit_copies1 (x, mode, x0, mode,
				cached_num_sign_bit_copies (x0, mode, known_x,
							    known_mode,
							    known_ret));

      /* Check the second level: one operand is shared with a
	 sub-operand of the other, e.g. (OP (OP2 Y Z) Y).  */
      if (num_sign_bit_copies_binary_arith_p (x0)
	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
	return
	  num_sign_bit_copies1 (x, mode, x1, mode,
				cached_num_sign_bit_copies (x1, mode, known_x,
							    known_mode,
							    known_ret));

      if (num_sign_bit_copies_binary_arith_p (x1)
	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
	return
	  num_sign_bit_copies1 (x, mode, x0, mode,
				cached_num_sign_bit_copies (x0, mode, known_x,
							    known_mode,
							    known_ret));
    }

  /* No sharing found; do the real work.  */
  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
}
497138fd1498Szrj 
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE.  The returned
   value will always be between 1 and the number of bits in MODE.

   KNOWN_X, KNOWN_MODE and KNOWN_RET implement the memoization performed
   by cached_num_sign_bit_copies: when a subexpression equals KNOWN_X in
   KNOWN_MODE, the precomputed answer KNOWN_RET is reused for it.  */

static unsigned int
num_sign_bit_copies1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
		      machine_mode known_mode,
		      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth = GET_MODE_PRECISION (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  if (CONST_INT_P (x))
    {
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
    }

  /* Non-scalar-integer modes tell us nothing; be conservative.  */
  scalar_int_mode xmode, inner_mode;
  if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
    return 1;

  unsigned int xmode_width = GET_MODE_PRECISION (xmode);

  /* For a smaller mode, just ignore the high bits.  */
  if (bitwidth < xmode_width)
    {
      num0 = cached_num_sign_bit_copies (x, xmode,
					 known_x, known_mode, known_ret);
      return MAX (1, num0 - (int) (xmode_width - bitwidth));
    }

  if (bitwidth > xmode_width)
    {
      /* If this machine does not do all register operations on the entire
	 register and MODE is wider than the mode of X, we can say nothing
	 at all about the high-order bits.  We extend this reasoning to RISC
	 machines for operations that might not operate on full registers.  */
      if (!(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
	return 1;

      /* Likewise on machines that do, if the mode of the object is smaller
	 than a word and loads of that size don't sign extend, we can say
	 nothing about the high order bits.  */
      if (xmode_width < BITS_PER_WORD
	  && load_extend_op (xmode) != SIGN_EXTEND)
	return 1;
    }

  /* Please keep num_sign_bit_copies_binary_arith_p above in sync with
     the code in the switch below.  */
  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED)
      /* If pointers extend signed and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be sign bit copies.  */
      /* As we do not know which address space the pointer is referring to,
	 we can do this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (target_default_pointer_address_modes_p ()
	  && ! POINTERS_EXTEND_UNSIGNED && xmode == Pmode
	  && mode == Pmode && REG_POINTER (x)
	  && !targetm.have_ptr_extend ())
	return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
#endif

      {
	/* Ask the backend (via the reg_num_sign_bit_copies hook) whether
	   it has extra knowledge about this register, possibly obtaining
	   an equivalent rtx to analyze instead.  */
	unsigned int copies_for_hook = 1, copies = 1;
	rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, xmode, mode,
							 &copies_for_hook);

	if (new_rtx)
	  copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
					       known_mode, known_ret);

	if (copies > 1 || copies_for_hook > 1)
	  return MAX (copies, copies_for_hook);

	/* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;

    case MEM:
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (load_extend_op (xmode) == SIGN_EXTEND)
	return MAX (1, ((int) bitwidth - (int) xmode_width + 1));
      break;

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
	 and we are looking at it in a wider mode, we know that at least the
	 high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
	{
	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
					     known_x, known_mode, known_ret);
	  return MAX ((int) bitwidth - (int) xmode_width + 1, num0);
	}

      if (is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (x)), &inner_mode))
	{
	  /* For a smaller object, just ignore the high bits.  */
	  if (bitwidth <= GET_MODE_PRECISION (inner_mode))
	    {
	      num0 = cached_num_sign_bit_copies (SUBREG_REG (x), inner_mode,
						 known_x, known_mode,
						 known_ret);
	      return MAX (1, num0 - (int) (GET_MODE_PRECISION (inner_mode)
					   - bitwidth));
	    }

	  /* For paradoxical SUBREGs on machines where all register operations
	     affect the entire register, just look inside.  Note that we are
	     passing MODE to the recursive call, so the number of sign bit
	     copies will remain relative to that mode, not the inner mode.

	     This works only if loads sign extend.  Otherwise, if we get a
	     reload for the inner part, it may be loaded from the stack, and
	     then we lose all sign bit copies that existed before the store
	     to the stack.  */
	  if (WORD_REGISTER_OPERATIONS
	      && load_extend_op (inner_mode) == SIGN_EXTEND
	      && paradoxical_subreg_p (x)
	      && MEM_P (SUBREG_REG (x)))
	    return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
					       known_x, known_mode, known_ret);
	}
      break;

    case SIGN_EXTRACT:
      /* A sign extraction of WIDTH bits leaves bitwidth - WIDTH copies.  */
      if (CONST_INT_P (XEXP (x, 1)))
	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      /* The extension adds bitwidth - inner-width guaranteed copies on
	 top of whatever the operand already had.  */
      if (is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
	return (bitwidth - GET_MODE_PRECISION (inner_mode)
		+ cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
					      known_x, known_mode, known_ret));
      break;

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
					 known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_PRECISION (inner_mode)
				    - bitwidth)));

    case NOT:
      /* Complementing flips every bit, so sign-bit copies are preserved.  */
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);

    case ROTATE:       case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
	 of sign bit copies, we can just subtract that amount from the
	 number.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
	{
	  num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					     known_x, known_mode, known_ret);
	  /* A right rotate by N is treated as a left rotate by
	     bitwidth - N.  */
	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
	}
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
	 is known to be positive, the number of sign bit copies is the
	 same as that of the input.  Finally, if the input has just one bit
	 that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
	return bitwidth;

      if (num0 > 1
	  && ((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero))
	num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
	 MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);

      /* If num1 is clearing some of the top bits then regardless of
	 the other term, we are guaranteed to have at least that many
	 high-order zero bits.  */
      if (code == AND
	  && num1 > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && CONST_INT_P (XEXP (x, 1))
	  && (UINTVAL (XEXP (x, 1))
	      & (HOST_WIDE_INT_1U << (bitwidth - 1))) == 0)
	return num1;

      /* Similarly for IOR when setting high-order bits.  */
      if (code == IOR
	  && num1 > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && CONST_INT_P (XEXP (x, 1))
	  && (UINTVAL (XEXP (x, 1))
	      & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	return num1;

      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
	 if we are subtracting 1 from a positive number, there will not
	 be such a carry.  Furthermore, if the positive number is known to
	 be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero = nonzero_bits (XEXP (x, 0), mode);
	  if (((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero) == 0)
	    return (nonzero == 1 || nonzero == 0 ? bitwidth
		    : bitwidth - floor_log2 (nonzero) - 1);
	}

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      /* Allow for the possible carry bit, but never report fewer than
	 one copy.  */
      result = MAX (1, MIN (num0, num1) - 1);

      return result;

    case MULT:
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms if known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (((nonzero_bits (XEXP (x, 0), mode)
		    & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
		  && ((nonzero_bits (XEXP (x, 1), mode)
		       & (HOST_WIDE_INT_1U << (bitwidth - 1)))
		      != 0))))
	result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
	 has the high bit set, we know nothing about the number of sign
	 bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
		& (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	return 1;
      else
	return cached_num_sign_bit_copies (XEXP (x, 0), mode,
					   known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  If the second operand
	 has (or just might have) the high bit set, we know nothing about
	 the number of sign bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 1), mode)
		& (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	return 1;
      else
	return cached_num_sign_bit_copies (XEXP (x, 1), mode,
					   known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case we have
	 to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					   known_x, known_mode, known_ret);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
	result--;

      return result;

    case MOD:
      /* As for DIV, but bounded by the second operand; allow one fewer
	 copy when the divisor might be negative.  */
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					   known_x, known_mode, known_ret);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
	result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
	 sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) > 0
	  && INTVAL (XEXP (x, 1)) < xmode_width)
	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (!CONST_INT_P (XEXP (x, 1))
	  || INTVAL (XEXP (x, 1)) < 0
	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth
	  || INTVAL (XEXP (x, 1)) >= xmode_width)
	return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      /* The result has as many sign-bit copies as the less favorable of
	 the two arms.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
					 known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* A comparison yields 0 or STORE_FLAG_VALUE, so analyze that
	 constant the same way as the CONST_INT case above.  */
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_PRECISION (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))
	 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
536038fd1498Szrj 
536138fd1498Szrj /* Calculate the rtx_cost of a single instruction pattern.  A return value of
536238fd1498Szrj    zero indicates an instruction pattern without a known cost.  */
536338fd1498Szrj 
536438fd1498Szrj int
pattern_cost(rtx pat,bool speed)536538fd1498Szrj pattern_cost (rtx pat, bool speed)
536638fd1498Szrj {
536738fd1498Szrj   int i, cost;
536838fd1498Szrj   rtx set;
536938fd1498Szrj 
537038fd1498Szrj   /* Extract the single set rtx from the instruction pattern.  We
537138fd1498Szrj      can't use single_set since we only have the pattern.  We also
537238fd1498Szrj      consider PARALLELs of a normal set and a single comparison.  In
537338fd1498Szrj      that case we use the cost of the non-comparison SET operation,
537438fd1498Szrj      which is most-likely to be the real cost of this operation.  */
537538fd1498Szrj   if (GET_CODE (pat) == SET)
537638fd1498Szrj     set = pat;
537738fd1498Szrj   else if (GET_CODE (pat) == PARALLEL)
537838fd1498Szrj     {
537938fd1498Szrj       set = NULL_RTX;
538038fd1498Szrj       rtx comparison = NULL_RTX;
538138fd1498Szrj 
538238fd1498Szrj       for (i = 0; i < XVECLEN (pat, 0); i++)
538338fd1498Szrj 	{
538438fd1498Szrj 	  rtx x = XVECEXP (pat, 0, i);
538538fd1498Szrj 	  if (GET_CODE (x) == SET)
538638fd1498Szrj 	    {
538738fd1498Szrj 	      if (GET_CODE (SET_SRC (x)) == COMPARE)
538838fd1498Szrj 		{
538938fd1498Szrj 		  if (comparison)
539038fd1498Szrj 		    return 0;
539138fd1498Szrj 		  comparison = x;
539238fd1498Szrj 		}
539338fd1498Szrj 	      else
539438fd1498Szrj 		{
539538fd1498Szrj 		  if (set)
539638fd1498Szrj 		    return 0;
539738fd1498Szrj 		  set = x;
539838fd1498Szrj 		}
539938fd1498Szrj 	    }
540038fd1498Szrj 	}
540138fd1498Szrj 
540238fd1498Szrj       if (!set && comparison)
540338fd1498Szrj 	set = comparison;
540438fd1498Szrj 
540538fd1498Szrj       if (!set)
540638fd1498Szrj 	return 0;
540738fd1498Szrj     }
540838fd1498Szrj   else
540938fd1498Szrj     return 0;
541038fd1498Szrj 
541138fd1498Szrj   cost = set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)), speed);
541238fd1498Szrj   return cost > 0 ? cost : COSTS_N_INSNS (1);
541338fd1498Szrj }
541438fd1498Szrj 
541538fd1498Szrj /* Calculate the cost of a single instruction.  A return value of zero
541638fd1498Szrj    indicates an instruction pattern without a known cost.  */
541738fd1498Szrj 
541838fd1498Szrj int
insn_cost(rtx_insn * insn,bool speed)541938fd1498Szrj insn_cost (rtx_insn *insn, bool speed)
542038fd1498Szrj {
542138fd1498Szrj   if (targetm.insn_cost)
542238fd1498Szrj     return targetm.insn_cost (insn, speed);
542338fd1498Szrj 
542438fd1498Szrj   return pattern_cost (PATTERN (insn), speed);
542538fd1498Szrj }
542638fd1498Szrj 
542738fd1498Szrj /* Returns estimate on cost of computing SEQ.  */
542838fd1498Szrj 
542938fd1498Szrj unsigned
seq_cost(const rtx_insn * seq,bool speed)543038fd1498Szrj seq_cost (const rtx_insn *seq, bool speed)
543138fd1498Szrj {
543238fd1498Szrj   unsigned cost = 0;
543338fd1498Szrj   rtx set;
543438fd1498Szrj 
543538fd1498Szrj   for (; seq; seq = NEXT_INSN (seq))
543638fd1498Szrj     {
543738fd1498Szrj       set = single_set (seq);
543838fd1498Szrj       if (set)
543938fd1498Szrj         cost += set_rtx_cost (set, speed);
544038fd1498Szrj       else if (NONDEBUG_INSN_P (seq))
544138fd1498Szrj 	{
544238fd1498Szrj 	  int this_cost = insn_cost (CONST_CAST_RTX_INSN (seq), speed);
544338fd1498Szrj 	  if (this_cost > 0)
544438fd1498Szrj 	    cost += this_cost;
544538fd1498Szrj 	  else
544638fd1498Szrj 	    cost++;
544738fd1498Szrj 	}
544838fd1498Szrj     }
544938fd1498Szrj 
545038fd1498Szrj   return cost;
545138fd1498Szrj }
545238fd1498Szrj 
545338fd1498Szrj /* Given an insn INSN and condition COND, return the condition in a
545438fd1498Szrj    canonical form to simplify testing by callers.  Specifically:
545538fd1498Szrj 
545638fd1498Szrj    (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
545738fd1498Szrj    (2) Both operands will be machine operands; (cc0) will have been replaced.
545838fd1498Szrj    (3) If an operand is a constant, it will be the second operand.
545938fd1498Szrj    (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
546038fd1498Szrj        for GE, GEU, and LEU.
546138fd1498Szrj 
546238fd1498Szrj    If the condition cannot be understood, or is an inequality floating-point
546338fd1498Szrj    comparison which needs to be reversed, 0 will be returned.
546438fd1498Szrj 
546538fd1498Szrj    If REVERSE is nonzero, then reverse the condition prior to canonizing it.
546638fd1498Szrj 
546738fd1498Szrj    If EARLIEST is nonzero, it is a pointer to a place where the earliest
546838fd1498Szrj    insn used in locating the condition was found.  If a replacement test
546938fd1498Szrj    of the condition is desired, it should be placed in front of that
547038fd1498Szrj    insn and we will be sure that the inputs are still valid.
547138fd1498Szrj 
547238fd1498Szrj    If WANT_REG is nonzero, we wish the condition to be relative to that
547338fd1498Szrj    register, if possible.  Therefore, do not canonicalize the condition
547438fd1498Szrj    further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
547538fd1498Szrj    to be a compare to a CC mode register.
547638fd1498Szrj 
547738fd1498Szrj    If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
547838fd1498Szrj    and at INSN.  */
547938fd1498Szrj 
548038fd1498Szrj rtx
canonicalize_condition(rtx_insn * insn,rtx cond,int reverse,rtx_insn ** earliest,rtx want_reg,int allow_cc_mode,int valid_at_insn_p)548138fd1498Szrj canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
548238fd1498Szrj 			rtx_insn **earliest,
548338fd1498Szrj 			rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
548438fd1498Szrj {
548538fd1498Szrj   enum rtx_code code;
548638fd1498Szrj   rtx_insn *prev = insn;
548738fd1498Szrj   const_rtx set;
548838fd1498Szrj   rtx tem;
548938fd1498Szrj   rtx op0, op1;
549038fd1498Szrj   int reverse_code = 0;
549138fd1498Szrj   machine_mode mode;
549238fd1498Szrj   basic_block bb = BLOCK_FOR_INSN (insn);
549338fd1498Szrj 
549438fd1498Szrj   code = GET_CODE (cond);
549538fd1498Szrj   mode = GET_MODE (cond);
549638fd1498Szrj   op0 = XEXP (cond, 0);
549738fd1498Szrj   op1 = XEXP (cond, 1);
549838fd1498Szrj 
549938fd1498Szrj   if (reverse)
550038fd1498Szrj     code = reversed_comparison_code (cond, insn);
550138fd1498Szrj   if (code == UNKNOWN)
550238fd1498Szrj     return 0;
550338fd1498Szrj 
550438fd1498Szrj   if (earliest)
550538fd1498Szrj     *earliest = insn;
550638fd1498Szrj 
550738fd1498Szrj   /* If we are comparing a register with zero, see if the register is set
550838fd1498Szrj      in the previous insn to a COMPARE or a comparison operation.  Perform
550938fd1498Szrj      the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
551038fd1498Szrj      in cse.c  */
551138fd1498Szrj 
551238fd1498Szrj   while ((GET_RTX_CLASS (code) == RTX_COMPARE
551338fd1498Szrj 	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
551438fd1498Szrj 	 && op1 == CONST0_RTX (GET_MODE (op0))
551538fd1498Szrj 	 && op0 != want_reg)
551638fd1498Szrj     {
551738fd1498Szrj       /* Set nonzero when we find something of interest.  */
551838fd1498Szrj       rtx x = 0;
551938fd1498Szrj 
552038fd1498Szrj       /* If comparison with cc0, import actual comparison from compare
552138fd1498Szrj 	 insn.  */
552238fd1498Szrj       if (op0 == cc0_rtx)
552338fd1498Szrj 	{
552438fd1498Szrj 	  if ((prev = prev_nonnote_insn (prev)) == 0
552538fd1498Szrj 	      || !NONJUMP_INSN_P (prev)
552638fd1498Szrj 	      || (set = single_set (prev)) == 0
552738fd1498Szrj 	      || SET_DEST (set) != cc0_rtx)
552838fd1498Szrj 	    return 0;
552938fd1498Szrj 
553038fd1498Szrj 	  op0 = SET_SRC (set);
553138fd1498Szrj 	  op1 = CONST0_RTX (GET_MODE (op0));
553238fd1498Szrj 	  if (earliest)
553338fd1498Szrj 	    *earliest = prev;
553438fd1498Szrj 	}
553538fd1498Szrj 
553638fd1498Szrj       /* If this is a COMPARE, pick up the two things being compared.  */
553738fd1498Szrj       if (GET_CODE (op0) == COMPARE)
553838fd1498Szrj 	{
553938fd1498Szrj 	  op1 = XEXP (op0, 1);
554038fd1498Szrj 	  op0 = XEXP (op0, 0);
554138fd1498Szrj 	  continue;
554238fd1498Szrj 	}
554338fd1498Szrj       else if (!REG_P (op0))
554438fd1498Szrj 	break;
554538fd1498Szrj 
554638fd1498Szrj       /* Go back to the previous insn.  Stop if it is not an INSN.  We also
554738fd1498Szrj 	 stop if it isn't a single set or if it has a REG_INC note because
554838fd1498Szrj 	 we don't want to bother dealing with it.  */
554938fd1498Szrj 
555038fd1498Szrj       prev = prev_nonnote_nondebug_insn (prev);
555138fd1498Szrj 
555238fd1498Szrj       if (prev == 0
555338fd1498Szrj 	  || !NONJUMP_INSN_P (prev)
555438fd1498Szrj 	  || FIND_REG_INC_NOTE (prev, NULL_RTX)
555538fd1498Szrj 	  /* In cfglayout mode, there do not have to be labels at the
555638fd1498Szrj 	     beginning of a block, or jumps at the end, so the previous
555738fd1498Szrj 	     conditions would not stop us when we reach bb boundary.  */
555838fd1498Szrj 	  || BLOCK_FOR_INSN (prev) != bb)
555938fd1498Szrj 	break;
556038fd1498Szrj 
556138fd1498Szrj       set = set_of (op0, prev);
556238fd1498Szrj 
556338fd1498Szrj       if (set
556438fd1498Szrj 	  && (GET_CODE (set) != SET
556538fd1498Szrj 	      || !rtx_equal_p (SET_DEST (set), op0)))
556638fd1498Szrj 	break;
556738fd1498Szrj 
556838fd1498Szrj       /* If this is setting OP0, get what it sets it to if it looks
556938fd1498Szrj 	 relevant.  */
557038fd1498Szrj       if (set)
557138fd1498Szrj 	{
557238fd1498Szrj 	  machine_mode inner_mode = GET_MODE (SET_DEST (set));
557338fd1498Szrj #ifdef FLOAT_STORE_FLAG_VALUE
557438fd1498Szrj 	  REAL_VALUE_TYPE fsfv;
557538fd1498Szrj #endif
557638fd1498Szrj 
557738fd1498Szrj 	  /* ??? We may not combine comparisons done in a CCmode with
557838fd1498Szrj 	     comparisons not done in a CCmode.  This is to aid targets
557938fd1498Szrj 	     like Alpha that have an IEEE compliant EQ instruction, and
558038fd1498Szrj 	     a non-IEEE compliant BEQ instruction.  The use of CCmode is
558138fd1498Szrj 	     actually artificial, simply to prevent the combination, but
558238fd1498Szrj 	     should not affect other platforms.
558338fd1498Szrj 
558438fd1498Szrj 	     However, we must allow VOIDmode comparisons to match either
558538fd1498Szrj 	     CCmode or non-CCmode comparison, because some ports have
558638fd1498Szrj 	     modeless comparisons inside branch patterns.
558738fd1498Szrj 
558838fd1498Szrj 	     ??? This mode check should perhaps look more like the mode check
558938fd1498Szrj 	     in simplify_comparison in combine.  */
559038fd1498Szrj 	  if (((GET_MODE_CLASS (mode) == MODE_CC)
559138fd1498Szrj 	       != (GET_MODE_CLASS (inner_mode) == MODE_CC))
559238fd1498Szrj 	      && mode != VOIDmode
559338fd1498Szrj 	      && inner_mode != VOIDmode)
559438fd1498Szrj 	    break;
559538fd1498Szrj 	  if (GET_CODE (SET_SRC (set)) == COMPARE
559638fd1498Szrj 	      || (((code == NE
559738fd1498Szrj 		    || (code == LT
559838fd1498Szrj 			&& val_signbit_known_set_p (inner_mode,
559938fd1498Szrj 						    STORE_FLAG_VALUE))
560038fd1498Szrj #ifdef FLOAT_STORE_FLAG_VALUE
560138fd1498Szrj 		    || (code == LT
560238fd1498Szrj 			&& SCALAR_FLOAT_MODE_P (inner_mode)
560338fd1498Szrj 			&& (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
560438fd1498Szrj 			    REAL_VALUE_NEGATIVE (fsfv)))
560538fd1498Szrj #endif
560638fd1498Szrj 		    ))
560738fd1498Szrj 		  && COMPARISON_P (SET_SRC (set))))
560838fd1498Szrj 	    x = SET_SRC (set);
560938fd1498Szrj 	  else if (((code == EQ
561038fd1498Szrj 		     || (code == GE
561138fd1498Szrj 			 && val_signbit_known_set_p (inner_mode,
561238fd1498Szrj 						     STORE_FLAG_VALUE))
561338fd1498Szrj #ifdef FLOAT_STORE_FLAG_VALUE
561438fd1498Szrj 		     || (code == GE
561538fd1498Szrj 			 && SCALAR_FLOAT_MODE_P (inner_mode)
561638fd1498Szrj 			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
561738fd1498Szrj 			     REAL_VALUE_NEGATIVE (fsfv)))
561838fd1498Szrj #endif
561938fd1498Szrj 		     ))
562038fd1498Szrj 		   && COMPARISON_P (SET_SRC (set)))
562138fd1498Szrj 	    {
562238fd1498Szrj 	      reverse_code = 1;
562338fd1498Szrj 	      x = SET_SRC (set);
562438fd1498Szrj 	    }
562538fd1498Szrj 	  else if ((code == EQ || code == NE)
562638fd1498Szrj 		   && GET_CODE (SET_SRC (set)) == XOR)
562738fd1498Szrj 	    /* Handle sequences like:
562838fd1498Szrj 
562938fd1498Szrj 	       (set op0 (xor X Y))
563038fd1498Szrj 	       ...(eq|ne op0 (const_int 0))...
563138fd1498Szrj 
563238fd1498Szrj 	       in which case:
563338fd1498Szrj 
563438fd1498Szrj 	       (eq op0 (const_int 0)) reduces to (eq X Y)
563538fd1498Szrj 	       (ne op0 (const_int 0)) reduces to (ne X Y)
563638fd1498Szrj 
563738fd1498Szrj 	       This is the form used by MIPS16, for example.  */
563838fd1498Szrj 	    x = SET_SRC (set);
563938fd1498Szrj 	  else
564038fd1498Szrj 	    break;
564138fd1498Szrj 	}
564238fd1498Szrj 
564338fd1498Szrj       else if (reg_set_p (op0, prev))
564438fd1498Szrj 	/* If this sets OP0, but not directly, we have to give up.  */
564538fd1498Szrj 	break;
564638fd1498Szrj 
564738fd1498Szrj       if (x)
564838fd1498Szrj 	{
564938fd1498Szrj 	  /* If the caller is expecting the condition to be valid at INSN,
565038fd1498Szrj 	     make sure X doesn't change before INSN.  */
565138fd1498Szrj 	  if (valid_at_insn_p)
565238fd1498Szrj 	    if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
565338fd1498Szrj 	      break;
565438fd1498Szrj 	  if (COMPARISON_P (x))
565538fd1498Szrj 	    code = GET_CODE (x);
565638fd1498Szrj 	  if (reverse_code)
565738fd1498Szrj 	    {
565838fd1498Szrj 	      code = reversed_comparison_code (x, prev);
565938fd1498Szrj 	      if (code == UNKNOWN)
566038fd1498Szrj 		return 0;
566138fd1498Szrj 	      reverse_code = 0;
566238fd1498Szrj 	    }
566338fd1498Szrj 
566438fd1498Szrj 	  op0 = XEXP (x, 0), op1 = XEXP (x, 1);
566538fd1498Szrj 	  if (earliest)
566638fd1498Szrj 	    *earliest = prev;
566738fd1498Szrj 	}
566838fd1498Szrj     }
566938fd1498Szrj 
567038fd1498Szrj   /* If constant is first, put it last.  */
567138fd1498Szrj   if (CONSTANT_P (op0))
567238fd1498Szrj     code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
567338fd1498Szrj 
567438fd1498Szrj   /* If OP0 is the result of a comparison, we weren't able to find what
567538fd1498Szrj      was really being compared, so fail.  */
567638fd1498Szrj   if (!allow_cc_mode
567738fd1498Szrj       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
567838fd1498Szrj     return 0;
567938fd1498Szrj 
568038fd1498Szrj   /* Canonicalize any ordered comparison with integers involving equality
568138fd1498Szrj      if we can do computations in the relevant mode and we do not
568238fd1498Szrj      overflow.  */
568338fd1498Szrj 
568438fd1498Szrj   scalar_int_mode op0_mode;
568538fd1498Szrj   if (CONST_INT_P (op1)
568638fd1498Szrj       && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
568738fd1498Szrj       && GET_MODE_PRECISION (op0_mode) <= HOST_BITS_PER_WIDE_INT)
568838fd1498Szrj     {
568938fd1498Szrj       HOST_WIDE_INT const_val = INTVAL (op1);
569038fd1498Szrj       unsigned HOST_WIDE_INT uconst_val = const_val;
569138fd1498Szrj       unsigned HOST_WIDE_INT max_val
569238fd1498Szrj 	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (op0_mode);
569338fd1498Szrj 
569438fd1498Szrj       switch (code)
569538fd1498Szrj 	{
569638fd1498Szrj 	case LE:
569738fd1498Szrj 	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
569838fd1498Szrj 	    code = LT, op1 = gen_int_mode (const_val + 1, op0_mode);
569938fd1498Szrj 	  break;
570038fd1498Szrj 
570138fd1498Szrj 	/* When cross-compiling, const_val might be sign-extended from
570238fd1498Szrj 	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
570338fd1498Szrj 	case GE:
570438fd1498Szrj 	  if ((const_val & max_val)
570538fd1498Szrj 	      != (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (op0_mode) - 1)))
570638fd1498Szrj 	    code = GT, op1 = gen_int_mode (const_val - 1, op0_mode);
570738fd1498Szrj 	  break;
570838fd1498Szrj 
570938fd1498Szrj 	case LEU:
571038fd1498Szrj 	  if (uconst_val < max_val)
571138fd1498Szrj 	    code = LTU, op1 = gen_int_mode (uconst_val + 1, op0_mode);
571238fd1498Szrj 	  break;
571338fd1498Szrj 
571438fd1498Szrj 	case GEU:
571538fd1498Szrj 	  if (uconst_val != 0)
571638fd1498Szrj 	    code = GTU, op1 = gen_int_mode (uconst_val - 1, op0_mode);
571738fd1498Szrj 	  break;
571838fd1498Szrj 
571938fd1498Szrj 	default:
572038fd1498Szrj 	  break;
572138fd1498Szrj 	}
572238fd1498Szrj     }
572338fd1498Szrj 
572438fd1498Szrj   /* Never return CC0; return zero instead.  */
572538fd1498Szrj   if (CC0_P (op0))
572638fd1498Szrj     return 0;
572738fd1498Szrj 
572838fd1498Szrj   /* We promised to return a comparison.  */
572938fd1498Szrj   rtx ret = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
573038fd1498Szrj   if (COMPARISON_P (ret))
573138fd1498Szrj     return ret;
573238fd1498Szrj   return 0;
573338fd1498Szrj }
573438fd1498Szrj 
573538fd1498Szrj /* Given a jump insn JUMP, return the condition that will cause it to branch
573638fd1498Szrj    to its JUMP_LABEL.  If the condition cannot be understood, or is an
573738fd1498Szrj    inequality floating-point comparison which needs to be reversed, 0 will
573838fd1498Szrj    be returned.
573938fd1498Szrj 
574038fd1498Szrj    If EARLIEST is nonzero, it is a pointer to a place where the earliest
574138fd1498Szrj    insn used in locating the condition was found.  If a replacement test
574238fd1498Szrj    of the condition is desired, it should be placed in front of that
574338fd1498Szrj    insn and we will be sure that the inputs are still valid.  If EARLIEST
574438fd1498Szrj    is null, the returned condition will be valid at INSN.
574538fd1498Szrj 
574638fd1498Szrj    If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
574738fd1498Szrj    compare CC mode register.
574838fd1498Szrj 
574938fd1498Szrj    VALID_AT_INSN_P is the same as for canonicalize_condition.  */
575038fd1498Szrj 
575138fd1498Szrj rtx
get_condition(rtx_insn * jump,rtx_insn ** earliest,int allow_cc_mode,int valid_at_insn_p)575238fd1498Szrj get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
575338fd1498Szrj 	       int valid_at_insn_p)
575438fd1498Szrj {
575538fd1498Szrj   rtx cond;
575638fd1498Szrj   int reverse;
575738fd1498Szrj   rtx set;
575838fd1498Szrj 
575938fd1498Szrj   /* If this is not a standard conditional jump, we can't parse it.  */
576038fd1498Szrj   if (!JUMP_P (jump)
576138fd1498Szrj       || ! any_condjump_p (jump))
576238fd1498Szrj     return 0;
576338fd1498Szrj   set = pc_set (jump);
576438fd1498Szrj 
576538fd1498Szrj   cond = XEXP (SET_SRC (set), 0);
576638fd1498Szrj 
576738fd1498Szrj   /* If this branches to JUMP_LABEL when the condition is false, reverse
576838fd1498Szrj      the condition.  */
576938fd1498Szrj   reverse
577038fd1498Szrj     = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
577138fd1498Szrj       && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
577238fd1498Szrj 
577338fd1498Szrj   return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
577438fd1498Szrj 				 allow_cc_mode, valid_at_insn_p);
577538fd1498Szrj }
577638fd1498Szrj 
/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
   TARGET_MODE_REP_EXTENDED.

   Note that we assume that the property of
   TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
   narrower than mode B.  I.e., if A is a mode narrower than B then in
   order to be able to operate on it in mode B, mode A needs to
   satisfy the requirements set by the representation of mode B.  */

static void
init_num_sign_bit_copies_in_rep (void)
{
  opt_scalar_int_mode in_mode_iter;
  scalar_int_mode mode;

  /* Fill in the table for every (IN_MODE, MODE) pair with MODE narrower
     than IN_MODE.  */
  FOR_EACH_MODE_IN_CLASS (in_mode_iter, MODE_INT)
    FOR_EACH_MODE_UNTIL (mode, in_mode_iter.require ())
      {
	scalar_int_mode in_mode = in_mode_iter.require ();
	scalar_int_mode i;

	/* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
	   extends to the next widest mode.  */
	gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
		    || GET_MODE_WIDER_MODE (mode).require () == in_mode);

	/* We are in in_mode.  Count how many bits outside of mode
	   have to be copies of the sign-bit.  */
	FOR_EACH_MODE (i, mode, in_mode)
	  {
	    /* This must always exist (for the last iteration it will be
	       IN_MODE).  */
	    scalar_int_mode wider = GET_MODE_WIDER_MODE (i).require ();

	    if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
		/* We can only check sign-bit copies starting from the
		   top-bit.  In order to be able to check the bits we
		   have already seen we pretend that subsequent bits
		   have to be sign-bit copies too.  */
		|| num_sign_bit_copies_in_rep [in_mode][mode])
	      /* Accumulate the widths of each widening step for which the
		 sign-bit-copy property holds (or is forced, per above).  */
	      num_sign_bit_copies_in_rep [in_mode][mode]
		+= GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
	  }
      }
}
582238fd1498Szrj 
582338fd1498Szrj /* Suppose that truncation from the machine mode of X to MODE is not a
582438fd1498Szrj    no-op.  See if there is anything special about X so that we can
582538fd1498Szrj    assume it already contains a truncated value of MODE.  */
582638fd1498Szrj 
582738fd1498Szrj bool
truncated_to_mode(machine_mode mode,const_rtx x)582838fd1498Szrj truncated_to_mode (machine_mode mode, const_rtx x)
582938fd1498Szrj {
583038fd1498Szrj   /* This register has already been used in MODE without explicit
583138fd1498Szrj      truncation.  */
583238fd1498Szrj   if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
583338fd1498Szrj     return true;
583438fd1498Szrj 
583538fd1498Szrj   /* See if we already satisfy the requirements of MODE.  If yes we
583638fd1498Szrj      can just switch to MODE.  */
583738fd1498Szrj   if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
583838fd1498Szrj       && (num_sign_bit_copies (x, GET_MODE (x))
583938fd1498Szrj 	  >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
584038fd1498Szrj     return true;
584138fd1498Szrj 
584238fd1498Szrj   return false;
584338fd1498Szrj }
584438fd1498Szrj 
584538fd1498Szrj /* Return true if RTX code CODE has a single sequence of zero or more
584638fd1498Szrj    "e" operands and no rtvec operands.  Initialize its rtx_all_subrtx_bounds
584738fd1498Szrj    entry in that case.  */
584838fd1498Szrj 
584938fd1498Szrj static bool
setup_reg_subrtx_bounds(unsigned int code)585038fd1498Szrj setup_reg_subrtx_bounds (unsigned int code)
585138fd1498Szrj {
585238fd1498Szrj   const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
585338fd1498Szrj   unsigned int i = 0;
585438fd1498Szrj   for (; format[i] != 'e'; ++i)
585538fd1498Szrj     {
585638fd1498Szrj       if (!format[i])
585738fd1498Szrj 	/* No subrtxes.  Leave start and count as 0.  */
585838fd1498Szrj 	return true;
585938fd1498Szrj       if (format[i] == 'E' || format[i] == 'V')
586038fd1498Szrj 	return false;
586138fd1498Szrj     }
586238fd1498Szrj 
586338fd1498Szrj   /* Record the sequence of 'e's.  */
586438fd1498Szrj   rtx_all_subrtx_bounds[code].start = i;
586538fd1498Szrj   do
586638fd1498Szrj     ++i;
586738fd1498Szrj   while (format[i] == 'e');
586838fd1498Szrj   rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
586938fd1498Szrj   /* rtl-iter.h relies on this.  */
587038fd1498Szrj   gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);
587138fd1498Szrj 
587238fd1498Szrj   for (; format[i]; ++i)
587338fd1498Szrj     if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
587438fd1498Szrj       return false;
587538fd1498Szrj 
587638fd1498Szrj   return true;
587738fd1498Szrj }
587838fd1498Szrj 
587938fd1498Szrj /* Initialize rtx_all_subrtx_bounds.  */
588038fd1498Szrj void
init_rtlanal(void)588138fd1498Szrj init_rtlanal (void)
588238fd1498Szrj {
588338fd1498Szrj   int i;
588438fd1498Szrj   for (i = 0; i < NUM_RTX_CODE; i++)
588538fd1498Szrj     {
588638fd1498Szrj       if (!setup_reg_subrtx_bounds (i))
588738fd1498Szrj 	rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
588838fd1498Szrj       if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
588938fd1498Szrj 	rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
589038fd1498Szrj     }
589138fd1498Szrj 
589238fd1498Szrj   init_num_sign_bit_copies_in_rep ();
589338fd1498Szrj }
589438fd1498Szrj 
589538fd1498Szrj /* Check whether this is a constant pool constant.  */
589638fd1498Szrj bool
constant_pool_constant_p(rtx x)589738fd1498Szrj constant_pool_constant_p (rtx x)
589838fd1498Szrj {
589938fd1498Szrj   x = avoid_constant_pool_reference (x);
590038fd1498Szrj   return CONST_DOUBLE_P (x);
590138fd1498Szrj }
590238fd1498Szrj 
590338fd1498Szrj /* If M is a bitmask that selects a field of low-order bits within an item but
590438fd1498Szrj    not the entire word, return the length of the field.  Return -1 otherwise.
590538fd1498Szrj    M is used in machine mode MODE.  */
590638fd1498Szrj 
590738fd1498Szrj int
low_bitmask_len(machine_mode mode,unsigned HOST_WIDE_INT m)590838fd1498Szrj low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
590938fd1498Szrj {
591038fd1498Szrj   if (mode != VOIDmode)
591138fd1498Szrj     {
591238fd1498Szrj       if (!HWI_COMPUTABLE_MODE_P (mode))
591338fd1498Szrj 	return -1;
591438fd1498Szrj       m &= GET_MODE_MASK (mode);
591538fd1498Szrj     }
591638fd1498Szrj 
591738fd1498Szrj   return exact_log2 (m + 1);
591838fd1498Szrj }
591938fd1498Szrj 
592038fd1498Szrj /* Return the mode of MEM's address.  */
592138fd1498Szrj 
592238fd1498Szrj scalar_int_mode
get_address_mode(rtx mem)592338fd1498Szrj get_address_mode (rtx mem)
592438fd1498Szrj {
592538fd1498Szrj   machine_mode mode;
592638fd1498Szrj 
592738fd1498Szrj   gcc_assert (MEM_P (mem));
592838fd1498Szrj   mode = GET_MODE (XEXP (mem, 0));
592938fd1498Szrj   if (mode != VOIDmode)
593038fd1498Szrj     return as_a <scalar_int_mode> (mode);
593138fd1498Szrj   return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
593238fd1498Szrj }
593338fd1498Szrj 
/* Split up a CONST_DOUBLE or integer constant rtx
   into two rtx's for single words,
   storing in *FIRST the word that comes first in memory in the target
   and in *SECOND the other.

   TODO: This function needs to be rewritten to work on any size
   integer.  */

void
split_double (rtx value, rtx *first, rtx *second)
{
  if (CONST_INT_P (value))
    {
      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
	{
	  /* In this case the CONST_INT holds both target words.
	     Extract the bits from it into two word-sized pieces.
	     Sign extend each half to HOST_WIDE_INT.  */
	  unsigned HOST_WIDE_INT low, high;
	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
	  unsigned bits_per_word = BITS_PER_WORD;

	  /* Set sign_bit to the most significant bit of a word.  */
	  sign_bit = 1;
	  sign_bit <<= bits_per_word - 1;

	  /* Set mask so that all bits of the word are set.  We could
	     have used 1 << BITS_PER_WORD instead of basing the
	     calculation on sign_bit.  However, on machines where
	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
	     compiler warning, even though the code would never be
	     executed.  */
	  mask = sign_bit << 1;
	  mask--;

	  /* Set sign_extend as any remaining bits.  */
	  sign_extend = ~mask;

	  /* Pick the lower word and sign-extend it.  */
	  low = INTVAL (value);
	  low &= mask;
	  if (low & sign_bit)
	    low |= sign_extend;

	  /* Pick the higher word, shifted to the least significant
	     bits, and sign-extend it.  */
	  high = INTVAL (value);
	  high >>= bits_per_word - 1;
	  high >>= 1;
	  high &= mask;
	  if (high & sign_bit)
	    high |= sign_extend;

	  /* Store the words in the target machine order.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      *first = GEN_INT (high);
	      *second = GEN_INT (low);
	    }
	  else
	    {
	      *first = GEN_INT (low);
	      *second = GEN_INT (high);
	    }
	}
      else
	{
	  /* The rule for using CONST_INT for a wider mode
	     is that we regard the value as signed.
	     So sign-extend it.  */
	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
	  if (WORDS_BIG_ENDIAN)
	    {
	      *first = high;
	      *second = value;
	    }
	  else
	    {
	      *first = value;
	      *second = high;
	    }
	}
    }
  else if (GET_CODE (value) == CONST_WIDE_INT)
    {
      /* All of this is scary code and needs to be converted to
	 properly work with any size integer.  */
      gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
      /* Element 0 is the least significant wide-int element.  */
      if (WORDS_BIG_ENDIAN)
	{
	  *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
	  *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
	}
      else
	{
	  *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
	  *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
	}
    }
  else if (!CONST_DOUBLE_P (value))
    {
      /* VALUE is some other single-word rtx: treat it as the low word
	 and pair it with a zero high word.  */
      if (WORDS_BIG_ENDIAN)
	{
	  *first = const0_rtx;
	  *second = value;
	}
      else
	{
	  *first = value;
	  *second = const0_rtx;
	}
    }
  else if (GET_MODE (value) == VOIDmode
	   /* This is the old way we did CONST_DOUBLE integers.  */
	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
    {
      /* In an integer, the words are defined as most and least significant.
	 So order them by the target's convention.  */
      if (WORDS_BIG_ENDIAN)
	{
	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
	}
      else
	{
	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
	}
    }
  else
    {
      /* Remaining case: a floating-point CONST_DOUBLE.  */
      long l[2];

      /* Note, this converts the REAL_VALUE_TYPE to the target's
	 format, splits up the floating point double and outputs
	 exactly 32 bits of it into each of l[0] and l[1] --
	 not necessarily BITS_PER_WORD bits.  */
      REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (value), l);

      /* If 32 bits is an entire word for the target, but not for the host,
	 then sign-extend on the host so that the number will look the same
	 way on the host that it would on the target.  See for instance
	 simplify_unary_operation.  The #if is needed to avoid compiler
	 warnings.  */

#if HOST_BITS_PER_LONG > 32
      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
	{
	  if (l[0] & ((long) 1 << 31))
	    l[0] |= ((unsigned long) (-1) << 32);
	  if (l[1] & ((long) 1 << 31))
	    l[1] |= ((unsigned long) (-1) << 32);
	}
#endif

      /* REAL_VALUE_TO_TARGET_DOUBLE already emits the halves in target
	 memory order, so no endian swap is needed here.  */
      *first = GEN_INT (l[0]);
      *second = GEN_INT (l[1]);
    }
}
609338fd1498Szrj 
609438fd1498Szrj /* Return true if X is a sign_extract or zero_extract from the least
609538fd1498Szrj    significant bit.  */
609638fd1498Szrj 
609738fd1498Szrj static bool
lsb_bitfield_op_p(rtx x)609838fd1498Szrj lsb_bitfield_op_p (rtx x)
609938fd1498Szrj {
610038fd1498Szrj   if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
610138fd1498Szrj     {
610238fd1498Szrj       machine_mode mode = GET_MODE (XEXP (x, 0));
610338fd1498Szrj       HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
610438fd1498Szrj       HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
610538fd1498Szrj       poly_int64 remaining_bits = GET_MODE_PRECISION (mode) - len;
610638fd1498Szrj 
610738fd1498Szrj       return known_eq (pos, BITS_BIG_ENDIAN ? remaining_bits : 0);
610838fd1498Szrj     }
610938fd1498Szrj   return false;
611038fd1498Szrj }
611138fd1498Szrj 
611238fd1498Szrj /* Strip outer address "mutations" from LOC and return a pointer to the
611338fd1498Szrj    inner value.  If OUTER_CODE is nonnull, store the code of the innermost
611438fd1498Szrj    stripped expression there.
611538fd1498Szrj 
611638fd1498Szrj    "Mutations" either convert between modes or apply some kind of
611738fd1498Szrj    extension, truncation or alignment.  */
611838fd1498Szrj 
611938fd1498Szrj rtx *
strip_address_mutations(rtx * loc,enum rtx_code * outer_code)612038fd1498Szrj strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
612138fd1498Szrj {
612238fd1498Szrj   for (;;)
612338fd1498Szrj     {
612438fd1498Szrj       enum rtx_code code = GET_CODE (*loc);
612538fd1498Szrj       if (GET_RTX_CLASS (code) == RTX_UNARY)
612638fd1498Szrj 	/* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
612738fd1498Szrj 	   used to convert between pointer sizes.  */
612838fd1498Szrj 	loc = &XEXP (*loc, 0);
612938fd1498Szrj       else if (lsb_bitfield_op_p (*loc))
613038fd1498Szrj 	/* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
613138fd1498Szrj 	   acts as a combined truncation and extension.  */
613238fd1498Szrj 	loc = &XEXP (*loc, 0);
613338fd1498Szrj       else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
613438fd1498Szrj 	/* (and ... (const_int -X)) is used to align to X bytes.  */
613538fd1498Szrj 	loc = &XEXP (*loc, 0);
613638fd1498Szrj       else if (code == SUBREG
613738fd1498Szrj                && !OBJECT_P (SUBREG_REG (*loc))
613838fd1498Szrj                && subreg_lowpart_p (*loc))
613938fd1498Szrj 	/* (subreg (operator ...) ...) inside and is used for mode
614038fd1498Szrj 	   conversion too.  */
614138fd1498Szrj 	loc = &SUBREG_REG (*loc);
614238fd1498Szrj       else
614338fd1498Szrj 	return loc;
614438fd1498Szrj       if (outer_code)
614538fd1498Szrj 	*outer_code = code;
614638fd1498Szrj     }
614738fd1498Szrj }
614838fd1498Szrj 
614938fd1498Szrj /* Return true if CODE applies some kind of scale.  The scaled value is
615038fd1498Szrj    is the first operand and the scale is the second.  */
615138fd1498Szrj 
615238fd1498Szrj static bool
binary_scale_code_p(enum rtx_code code)615338fd1498Szrj binary_scale_code_p (enum rtx_code code)
615438fd1498Szrj {
615538fd1498Szrj   return (code == MULT
615638fd1498Szrj           || code == ASHIFT
615738fd1498Szrj           /* Needed by ARM targets.  */
615838fd1498Szrj           || code == ASHIFTRT
615938fd1498Szrj           || code == LSHIFTRT
616038fd1498Szrj           || code == ROTATE
616138fd1498Szrj           || code == ROTATERT);
616238fd1498Szrj }
616338fd1498Szrj 
616438fd1498Szrj /* If *INNER can be interpreted as a base, return a pointer to the inner term
616538fd1498Szrj    (see address_info).  Return null otherwise.  */
616638fd1498Szrj 
616738fd1498Szrj static rtx *
get_base_term(rtx * inner)616838fd1498Szrj get_base_term (rtx *inner)
616938fd1498Szrj {
617038fd1498Szrj   if (GET_CODE (*inner) == LO_SUM)
617138fd1498Szrj     inner = strip_address_mutations (&XEXP (*inner, 0));
617238fd1498Szrj   if (REG_P (*inner)
617338fd1498Szrj       || MEM_P (*inner)
617438fd1498Szrj       || GET_CODE (*inner) == SUBREG
617538fd1498Szrj       || GET_CODE (*inner) == SCRATCH)
617638fd1498Szrj     return inner;
617738fd1498Szrj   return 0;
617838fd1498Szrj }
617938fd1498Szrj 
618038fd1498Szrj /* If *INNER can be interpreted as an index, return a pointer to the inner term
618138fd1498Szrj    (see address_info).  Return null otherwise.  */
618238fd1498Szrj 
618338fd1498Szrj static rtx *
get_index_term(rtx * inner)618438fd1498Szrj get_index_term (rtx *inner)
618538fd1498Szrj {
618638fd1498Szrj   /* At present, only constant scales are allowed.  */
618738fd1498Szrj   if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
618838fd1498Szrj     inner = strip_address_mutations (&XEXP (*inner, 0));
618938fd1498Szrj   if (REG_P (*inner)
619038fd1498Szrj       || MEM_P (*inner)
619138fd1498Szrj       || GET_CODE (*inner) == SUBREG
619238fd1498Szrj       || GET_CODE (*inner) == SCRATCH)
619338fd1498Szrj     return inner;
619438fd1498Szrj   return 0;
619538fd1498Szrj }
619638fd1498Szrj 
/* Set the segment part of address INFO to LOC, given that INNER is the
   unmutated value.  Each part of INFO may be set at most once.  */

static void
set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->segment);
  info->segment = loc;
  info->segment_term = inner;
}
620738fd1498Szrj 
/* Set the base part of address INFO to LOC, given that INNER is the
   unmutated value.  Each part of INFO may be set at most once.  */

static void
set_address_base (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->base);
  info->base = loc;
  info->base_term = inner;
}
621838fd1498Szrj 
/* Set the index part of address INFO to LOC, given that INNER is the
   unmutated value.  Each part of INFO may be set at most once.  */

static void
set_address_index (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->index);
  info->index = loc;
  info->index_term = inner;
}
622938fd1498Szrj 
/* Set the displacement part of address INFO to LOC, given that INNER
   is the constant term.  Each part of INFO may be set at most once.  */

static void
set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->disp);
  info->disp = loc;
  info->disp_term = inner;
}
624038fd1498Szrj 
624138fd1498Szrj /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address.  Set up the
624238fd1498Szrj    rest of INFO accordingly.  */
624338fd1498Szrj 
624438fd1498Szrj static void
decompose_incdec_address(struct address_info * info)624538fd1498Szrj decompose_incdec_address (struct address_info *info)
624638fd1498Szrj {
624738fd1498Szrj   info->autoinc_p = true;
624838fd1498Szrj 
624938fd1498Szrj   rtx *base = &XEXP (*info->inner, 0);
625038fd1498Szrj   set_address_base (info, base, base);
625138fd1498Szrj   gcc_checking_assert (info->base == info->base_term);
625238fd1498Szrj 
625338fd1498Szrj   /* These addresses are only valid when the size of the addressed
625438fd1498Szrj      value is known.  */
625538fd1498Szrj   gcc_checking_assert (info->mode != VOIDmode);
625638fd1498Szrj }
625738fd1498Szrj 
625838fd1498Szrj /* INFO->INNER describes a {PRE,POST}_MODIFY address.  Set up the rest
625938fd1498Szrj    of INFO accordingly.  */
626038fd1498Szrj 
626138fd1498Szrj static void
decompose_automod_address(struct address_info * info)626238fd1498Szrj decompose_automod_address (struct address_info *info)
626338fd1498Szrj {
626438fd1498Szrj   info->autoinc_p = true;
626538fd1498Szrj 
626638fd1498Szrj   rtx *base = &XEXP (*info->inner, 0);
626738fd1498Szrj   set_address_base (info, base, base);
626838fd1498Szrj   gcc_checking_assert (info->base == info->base_term);
626938fd1498Szrj 
627038fd1498Szrj   rtx plus = XEXP (*info->inner, 1);
627138fd1498Szrj   gcc_assert (GET_CODE (plus) == PLUS);
627238fd1498Szrj 
627338fd1498Szrj   info->base_term2 = &XEXP (plus, 0);
627438fd1498Szrj   gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
627538fd1498Szrj 
627638fd1498Szrj   rtx *step = &XEXP (plus, 1);
627738fd1498Szrj   rtx *inner_step = strip_address_mutations (step);
627838fd1498Szrj   if (CONSTANT_P (*inner_step))
627938fd1498Szrj     set_address_disp (info, step, inner_step);
628038fd1498Szrj   else
628138fd1498Szrj     set_address_index (info, step, inner_step);
628238fd1498Szrj }
628338fd1498Szrj 
628438fd1498Szrj /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
628538fd1498Szrj    values in [PTR, END).  Return a pointer to the end of the used array.  */
628638fd1498Szrj 
628738fd1498Szrj static rtx **
extract_plus_operands(rtx * loc,rtx ** ptr,rtx ** end)628838fd1498Szrj extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
628938fd1498Szrj {
629038fd1498Szrj   rtx x = *loc;
629138fd1498Szrj   if (GET_CODE (x) == PLUS)
629238fd1498Szrj     {
629338fd1498Szrj       ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
629438fd1498Szrj       ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
629538fd1498Szrj     }
629638fd1498Szrj   else
629738fd1498Szrj     {
629838fd1498Szrj       gcc_assert (ptr != end);
629938fd1498Szrj       *ptr++ = loc;
630038fd1498Szrj     }
630138fd1498Szrj   return ptr;
630238fd1498Szrj }
630338fd1498Szrj 
630438fd1498Szrj /* Evaluate the likelihood of X being a base or index value, returning
630538fd1498Szrj    positive if it is likely to be a base, negative if it is likely to be
630638fd1498Szrj    an index, and 0 if we can't tell.  Make the magnitude of the return
630738fd1498Szrj    value reflect the amount of confidence we have in the answer.
630838fd1498Szrj 
630938fd1498Szrj    MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.  */
631038fd1498Szrj 
631138fd1498Szrj static int
baseness(rtx x,machine_mode mode,addr_space_t as,enum rtx_code outer_code,enum rtx_code index_code)631238fd1498Szrj baseness (rtx x, machine_mode mode, addr_space_t as,
631338fd1498Szrj 	  enum rtx_code outer_code, enum rtx_code index_code)
631438fd1498Szrj {
631538fd1498Szrj   /* Believe *_POINTER unless the address shape requires otherwise.  */
631638fd1498Szrj   if (REG_P (x) && REG_POINTER (x))
631738fd1498Szrj     return 2;
631838fd1498Szrj   if (MEM_P (x) && MEM_POINTER (x))
631938fd1498Szrj     return 2;
632038fd1498Szrj 
632138fd1498Szrj   if (REG_P (x) && HARD_REGISTER_P (x))
632238fd1498Szrj     {
632338fd1498Szrj       /* X is a hard register.  If it only fits one of the base
632438fd1498Szrj 	 or index classes, choose that interpretation.  */
632538fd1498Szrj       int regno = REGNO (x);
632638fd1498Szrj       bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
632738fd1498Szrj       bool index_p = REGNO_OK_FOR_INDEX_P (regno);
632838fd1498Szrj       if (base_p != index_p)
632938fd1498Szrj 	return base_p ? 1 : -1;
633038fd1498Szrj     }
633138fd1498Szrj   return 0;
633238fd1498Szrj }
633338fd1498Szrj 
633438fd1498Szrj /* INFO->INNER describes a normal, non-automodified address.
633538fd1498Szrj    Fill in the rest of INFO accordingly.  */
633638fd1498Szrj 
633738fd1498Szrj static void
decompose_normal_address(struct address_info * info)633838fd1498Szrj decompose_normal_address (struct address_info *info)
633938fd1498Szrj {
634038fd1498Szrj   /* Treat the address as the sum of up to four values.  */
634138fd1498Szrj   rtx *ops[4];
634238fd1498Szrj   size_t n_ops = extract_plus_operands (info->inner, ops,
634338fd1498Szrj 					ops + ARRAY_SIZE (ops)) - ops;
634438fd1498Szrj 
634538fd1498Szrj   /* If there is more than one component, any base component is in a PLUS.  */
634638fd1498Szrj   if (n_ops > 1)
634738fd1498Szrj     info->base_outer_code = PLUS;
634838fd1498Szrj 
634938fd1498Szrj   /* Try to classify each sum operand now.  Leave those that could be
635038fd1498Szrj      either a base or an index in OPS.  */
635138fd1498Szrj   rtx *inner_ops[4];
635238fd1498Szrj   size_t out = 0;
635338fd1498Szrj   for (size_t in = 0; in < n_ops; ++in)
635438fd1498Szrj     {
635538fd1498Szrj       rtx *loc = ops[in];
635638fd1498Szrj       rtx *inner = strip_address_mutations (loc);
635738fd1498Szrj       if (CONSTANT_P (*inner))
635838fd1498Szrj 	set_address_disp (info, loc, inner);
635938fd1498Szrj       else if (GET_CODE (*inner) == UNSPEC)
636038fd1498Szrj 	set_address_segment (info, loc, inner);
636138fd1498Szrj       else
636238fd1498Szrj 	{
636338fd1498Szrj 	  /* The only other possibilities are a base or an index.  */
636438fd1498Szrj 	  rtx *base_term = get_base_term (inner);
636538fd1498Szrj 	  rtx *index_term = get_index_term (inner);
636638fd1498Szrj 	  gcc_assert (base_term || index_term);
636738fd1498Szrj 	  if (!base_term)
636838fd1498Szrj 	    set_address_index (info, loc, index_term);
636938fd1498Szrj 	  else if (!index_term)
637038fd1498Szrj 	    set_address_base (info, loc, base_term);
637138fd1498Szrj 	  else
637238fd1498Szrj 	    {
637338fd1498Szrj 	      gcc_assert (base_term == index_term);
637438fd1498Szrj 	      ops[out] = loc;
637538fd1498Szrj 	      inner_ops[out] = base_term;
637638fd1498Szrj 	      ++out;
637738fd1498Szrj 	    }
637838fd1498Szrj 	}
637938fd1498Szrj     }
638038fd1498Szrj 
638138fd1498Szrj   /* Classify the remaining OPS members as bases and indexes.  */
638238fd1498Szrj   if (out == 1)
638338fd1498Szrj     {
638438fd1498Szrj       /* If we haven't seen a base or an index yet, assume that this is
638538fd1498Szrj 	 the base.  If we were confident that another term was the base
638638fd1498Szrj 	 or index, treat the remaining operand as the other kind.  */
638738fd1498Szrj       if (!info->base)
638838fd1498Szrj 	set_address_base (info, ops[0], inner_ops[0]);
638938fd1498Szrj       else
639038fd1498Szrj 	set_address_index (info, ops[0], inner_ops[0]);
639138fd1498Szrj     }
639238fd1498Szrj   else if (out == 2)
639338fd1498Szrj     {
639438fd1498Szrj       /* In the event of a tie, assume the base comes first.  */
639538fd1498Szrj       if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
639638fd1498Szrj 		    GET_CODE (*ops[1]))
639738fd1498Szrj 	  >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
639838fd1498Szrj 		       GET_CODE (*ops[0])))
639938fd1498Szrj 	{
640038fd1498Szrj 	  set_address_base (info, ops[0], inner_ops[0]);
640138fd1498Szrj 	  set_address_index (info, ops[1], inner_ops[1]);
640238fd1498Szrj 	}
640338fd1498Szrj       else
640438fd1498Szrj 	{
640538fd1498Szrj 	  set_address_base (info, ops[1], inner_ops[1]);
640638fd1498Szrj 	  set_address_index (info, ops[0], inner_ops[0]);
640738fd1498Szrj 	}
640838fd1498Szrj     }
640938fd1498Szrj   else
641038fd1498Szrj     gcc_assert (out == 0);
641138fd1498Szrj }
641238fd1498Szrj 
641338fd1498Szrj /* Describe address *LOC in *INFO.  MODE is the mode of the addressed value,
641438fd1498Szrj    or VOIDmode if not known.  AS is the address space associated with LOC.
641538fd1498Szrj    OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.  */
641638fd1498Szrj 
641738fd1498Szrj void
decompose_address(struct address_info * info,rtx * loc,machine_mode mode,addr_space_t as,enum rtx_code outer_code)641838fd1498Szrj decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
641938fd1498Szrj 		   addr_space_t as, enum rtx_code outer_code)
642038fd1498Szrj {
642138fd1498Szrj   memset (info, 0, sizeof (*info));
642238fd1498Szrj   info->mode = mode;
642338fd1498Szrj   info->as = as;
642438fd1498Szrj   info->addr_outer_code = outer_code;
642538fd1498Szrj   info->outer = loc;
642638fd1498Szrj   info->inner = strip_address_mutations (loc, &outer_code);
642738fd1498Szrj   info->base_outer_code = outer_code;
642838fd1498Szrj   switch (GET_CODE (*info->inner))
642938fd1498Szrj     {
643038fd1498Szrj     case PRE_DEC:
643138fd1498Szrj     case PRE_INC:
643238fd1498Szrj     case POST_DEC:
643338fd1498Szrj     case POST_INC:
643438fd1498Szrj       decompose_incdec_address (info);
643538fd1498Szrj       break;
643638fd1498Szrj 
643738fd1498Szrj     case PRE_MODIFY:
643838fd1498Szrj     case POST_MODIFY:
643938fd1498Szrj       decompose_automod_address (info);
644038fd1498Szrj       break;
644138fd1498Szrj 
644238fd1498Szrj     default:
644338fd1498Szrj       decompose_normal_address (info);
644438fd1498Szrj       break;
644538fd1498Szrj     }
644638fd1498Szrj }
644738fd1498Szrj 
644838fd1498Szrj /* Describe address operand LOC in INFO.  */
644938fd1498Szrj 
645038fd1498Szrj void
decompose_lea_address(struct address_info * info,rtx * loc)645138fd1498Szrj decompose_lea_address (struct address_info *info, rtx *loc)
645238fd1498Szrj {
645338fd1498Szrj   decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
645438fd1498Szrj }
645538fd1498Szrj 
645638fd1498Szrj /* Describe the address of MEM X in INFO.  */
645738fd1498Szrj 
645838fd1498Szrj void
decompose_mem_address(struct address_info * info,rtx x)645938fd1498Szrj decompose_mem_address (struct address_info *info, rtx x)
646038fd1498Szrj {
646138fd1498Szrj   gcc_assert (MEM_P (x));
646238fd1498Szrj   decompose_address (info, &XEXP (x, 0), GET_MODE (x),
646338fd1498Szrj 		     MEM_ADDR_SPACE (x), MEM);
646438fd1498Szrj }
646538fd1498Szrj 
646638fd1498Szrj /* Update INFO after a change to the address it describes.  */
646738fd1498Szrj 
646838fd1498Szrj void
update_address(struct address_info * info)646938fd1498Szrj update_address (struct address_info *info)
647038fd1498Szrj {
647138fd1498Szrj   decompose_address (info, info->outer, info->mode, info->as,
647238fd1498Szrj 		     info->addr_outer_code);
647338fd1498Szrj }
647438fd1498Szrj 
647538fd1498Szrj /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
647638fd1498Szrj    more complicated than that.  */
647738fd1498Szrj 
647838fd1498Szrj HOST_WIDE_INT
get_index_scale(const struct address_info * info)647938fd1498Szrj get_index_scale (const struct address_info *info)
648038fd1498Szrj {
648138fd1498Szrj   rtx index = *info->index;
648238fd1498Szrj   if (GET_CODE (index) == MULT
648338fd1498Szrj       && CONST_INT_P (XEXP (index, 1))
648438fd1498Szrj       && info->index_term == &XEXP (index, 0))
648538fd1498Szrj     return INTVAL (XEXP (index, 1));
648638fd1498Szrj 
648738fd1498Szrj   if (GET_CODE (index) == ASHIFT
648838fd1498Szrj       && CONST_INT_P (XEXP (index, 1))
648938fd1498Szrj       && info->index_term == &XEXP (index, 0))
649038fd1498Szrj     return HOST_WIDE_INT_1 << INTVAL (XEXP (index, 1));
649138fd1498Szrj 
649238fd1498Szrj   if (info->index == info->index_term)
649338fd1498Szrj     return 1;
649438fd1498Szrj 
649538fd1498Szrj   return 0;
649638fd1498Szrj }
649738fd1498Szrj 
649838fd1498Szrj /* Return the "index code" of INFO, in the form required by
649938fd1498Szrj    ok_for_base_p_1.  */
650038fd1498Szrj 
650138fd1498Szrj enum rtx_code
get_index_code(const struct address_info * info)650238fd1498Szrj get_index_code (const struct address_info *info)
650338fd1498Szrj {
650438fd1498Szrj   if (info->index)
650538fd1498Szrj     return GET_CODE (*info->index);
650638fd1498Szrj 
650738fd1498Szrj   if (info->disp)
650838fd1498Szrj     return GET_CODE (*info->disp);
650938fd1498Szrj 
651038fd1498Szrj   return SCRATCH;
651138fd1498Szrj }
651238fd1498Szrj 
651338fd1498Szrj /* Return true if RTL X contains a SYMBOL_REF.  */
651438fd1498Szrj 
651538fd1498Szrj bool
contains_symbol_ref_p(const_rtx x)651638fd1498Szrj contains_symbol_ref_p (const_rtx x)
651738fd1498Szrj {
651838fd1498Szrj   subrtx_iterator::array_type array;
651938fd1498Szrj   FOR_EACH_SUBRTX (iter, array, x, ALL)
652038fd1498Szrj     if (SYMBOL_REF_P (*iter))
652138fd1498Szrj       return true;
652238fd1498Szrj 
652338fd1498Szrj   return false;
652438fd1498Szrj }
652538fd1498Szrj 
652638fd1498Szrj /* Return true if RTL X contains a SYMBOL_REF or LABEL_REF.  */
652738fd1498Szrj 
652838fd1498Szrj bool
contains_symbolic_reference_p(const_rtx x)652938fd1498Szrj contains_symbolic_reference_p (const_rtx x)
653038fd1498Szrj {
653138fd1498Szrj   subrtx_iterator::array_type array;
653238fd1498Szrj   FOR_EACH_SUBRTX (iter, array, x, ALL)
653338fd1498Szrj     if (SYMBOL_REF_P (*iter) || GET_CODE (*iter) == LABEL_REF)
653438fd1498Szrj       return true;
653538fd1498Szrj 
653638fd1498Szrj   return false;
653738fd1498Szrj }
653838fd1498Szrj 
653938fd1498Szrj /* Return true if X contains a thread-local symbol.  */
654038fd1498Szrj 
654138fd1498Szrj bool
tls_referenced_p(const_rtx x)654238fd1498Szrj tls_referenced_p (const_rtx x)
654338fd1498Szrj {
654438fd1498Szrj   if (!targetm.have_tls)
654538fd1498Szrj     return false;
654638fd1498Szrj 
654738fd1498Szrj   subrtx_iterator::array_type array;
654838fd1498Szrj   FOR_EACH_SUBRTX (iter, array, x, ALL)
654938fd1498Szrj     if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
655038fd1498Szrj       return true;
655138fd1498Szrj   return false;
655238fd1498Szrj }
6553