xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/rtlanal.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Analyze RTL for GNU compiler.
2    Copyright (C) 1987-2015 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "target.h"
31 #include "output.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "regs.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "vec.h"
38 #include "machmode.h"
39 #include "input.h"
40 #include "function.h"
41 #include "predict.h"
42 #include "basic-block.h"
43 #include "df.h"
44 #include "symtab.h"
45 #include "wide-int.h"
46 #include "inchash.h"
47 #include "tree.h"
48 #include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
49 #include "addresses.h"
50 #include "rtl-iter.h"
51 
52 /* Forward declarations */
53 static void set_of_1 (rtx, const_rtx, void *);
54 static bool covers_regno_p (const_rtx, unsigned int);
55 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
56 static int computed_jump_p_1 (const_rtx);
57 static void parms_set (rtx, const_rtx, void *);
58 
59 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, machine_mode,
60                                                    const_rtx, machine_mode,
61                                                    unsigned HOST_WIDE_INT);
62 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, machine_mode,
63 					     const_rtx, machine_mode,
64                                              unsigned HOST_WIDE_INT);
65 static unsigned int cached_num_sign_bit_copies (const_rtx, machine_mode, const_rtx,
66                                                 machine_mode,
67                                                 unsigned int);
68 static unsigned int num_sign_bit_copies1 (const_rtx, machine_mode, const_rtx,
69                                           machine_mode, unsigned int);
70 
71 rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
72 rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];
73 
74 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
75    If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
76    SIGN_EXTEND then while narrowing we also have to enforce the
77    representation and sign-extend the value to mode DESTINATION_REP.
78 
79    If the value is already sign-extended to DESTINATION_REP mode we
80    can just switch to DESTINATION mode on it.  For each pair of
81    integral modes SOURCE and DESTINATION, when truncating from SOURCE
82    to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
83    contains the number of high-order bits in SOURCE that have to be
84    copies of the sign-bit so that we can do this mode-switch to
85    DESTINATION.  */
86 
87 static unsigned int
88 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
89 
90 /* Store X into index I of ARRAY.  ARRAY is known to have at least I
91    elements.  Return the new base of ARRAY.  */
92 
93 template <typename T>
94 typename T::value_type *
95 generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
96 						  value_type *base,
97 						  size_t i, value_type x)
98 {
99   if (base == array.stack)
100     {
101       if (i < LOCAL_ELEMS)
102 	{
103 	  base[i] = x;
104 	  return base;
105 	}
106       gcc_checking_assert (i == LOCAL_ELEMS);
107       vec_safe_grow (array.heap, i + 1);
108       base = array.heap->address ();
109       memcpy (base, array.stack, sizeof (array.stack));
110       base[LOCAL_ELEMS] = x;
111       return base;
112     }
113   unsigned int length = array.heap->length ();
114   if (length > i)
115     {
116       gcc_checking_assert (base == array.heap->address ());
117       base[i] = x;
118       return base;
119     }
120   else
121     {
122       gcc_checking_assert (i == length);
123       vec_safe_push (array.heap, x);
124       return array.heap->address ();
125     }
126 }
127 
128 /* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
129    number of elements added to the worklist.  */
130 
131 template <typename T>
132 size_t
133 generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
134 						    value_type *base,
135 						    size_t end, rtx_type x)
136 {
137   enum rtx_code code = GET_CODE (x);
138   const char *format = GET_RTX_FORMAT (code);
139   size_t orig_end = end;
140   if (__builtin_expect (INSN_P (x), false))
141     {
142       /* Put the pattern at the top of the queue, since that's what
143 	 we're likely to want most.  It also allows for the SEQUENCE
144 	 code below.  */
145       for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
146 	if (format[i] == 'e')
147 	  {
148 	    value_type subx = T::get_value (x->u.fld[i].rt_rtx);
149 	    if (__builtin_expect (end < LOCAL_ELEMS, true))
150 	      base[end++] = subx;
151 	    else
152 	      base = add_single_to_queue (array, base, end++, subx);
153 	  }
154     }
155   else
156     for (int i = 0; format[i]; ++i)
157       if (format[i] == 'e')
158 	{
159 	  value_type subx = T::get_value (x->u.fld[i].rt_rtx);
160 	  if (__builtin_expect (end < LOCAL_ELEMS, true))
161 	    base[end++] = subx;
162 	  else
163 	    base = add_single_to_queue (array, base, end++, subx);
164 	}
165       else if (format[i] == 'E')
166 	{
167 	  unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
168 	  rtx *vec = x->u.fld[i].rt_rtvec->elem;
169 	  if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
170 	    for (unsigned int j = 0; j < length; j++)
171 	      base[end++] = T::get_value (vec[j]);
172 	  else
173 	    for (unsigned int j = 0; j < length; j++)
174 	      base = add_single_to_queue (array, base, end++,
175 					  T::get_value (vec[j]));
176 	  if (code == SEQUENCE && end == length)
177 	    /* If the subrtxes of the sequence fill the entire array then
178 	       we know that no other parts of a containing insn are queued.
179 	       The caller is therefore iterating over the sequence as a
180 	       PATTERN (...), so we also want the patterns of the
181 	       subinstructions.  */
182 	    for (unsigned int j = 0; j < length; j++)
183 	      {
184 		typename T::rtx_type x = T::get_rtx (base[j]);
185 		if (INSN_P (x))
186 		  base[j] = T::get_value (PATTERN (x));
187 	      }
188 	}
189   return end - orig_end;
190 }
191 
192 template <typename T>
193 void
194 generic_subrtx_iterator <T>::free_array (array_type &array)
195 {
196   vec_free (array.heap);
197 }
198 
199 template <typename T>
200 const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;
201 
202 template class generic_subrtx_iterator <const_rtx_accessor>;
203 template class generic_subrtx_iterator <rtx_var_accessor>;
204 template class generic_subrtx_iterator <rtx_ptr_accessor>;
205 
206 /* Return 1 if the value of X is unstable
207    (would be different at a different point in the program).
208    The frame pointer, arg pointer, etc. are considered stable
209    (within one function) and so is anything marked `unchanging'.  */
210 
211 int
212 rtx_unstable_p (const_rtx x)
213 {
214   const RTX_CODE code = GET_CODE (x);
215   int i;
216   const char *fmt;
217 
218   switch (code)
219     {
220     case MEM:
221       return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
222 
223     case CONST:
224     CASE_CONST_ANY:
225     case SYMBOL_REF:
226     case LABEL_REF:
227       return 0;
228 
229     case REG:
230       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
231       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
232 	  /* The arg pointer varies if it is not a fixed register.  */
233 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
234 	return 0;
235       /* ??? When call-clobbered, the value is stable modulo the restore
236 	 that must happen after a call.  This currently screws up local-alloc
237 	 into believing that the restore is not needed.  */
238       if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
239 	return 0;
240       return 1;
241 
242     case ASM_OPERANDS:
243       if (MEM_VOLATILE_P (x))
244 	return 1;
245 
246       /* Fall through.  */
247 
248     default:
249       break;
250     }
251 
252   fmt = GET_RTX_FORMAT (code);
253   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
254     if (fmt[i] == 'e')
255       {
256 	if (rtx_unstable_p (XEXP (x, i)))
257 	  return 1;
258       }
259     else if (fmt[i] == 'E')
260       {
261 	int j;
262 	for (j = 0; j < XVECLEN (x, i); j++)
263 	  if (rtx_unstable_p (XVECEXP (x, i, j)))
264 	    return 1;
265       }
266 
267   return 0;
268 }
269 
270 /* Return 1 if X has a value that can vary even between two
271    executions of the program.  0 means X can be compared reliably
272    against certain constants or near-constants.
273    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
274    zero, we are slightly more conservative.
275    The frame pointer and the arg pointer are considered constant.  */
276 
277 bool
278 rtx_varies_p (const_rtx x, bool for_alias)
279 {
280   RTX_CODE code;
281   int i;
282   const char *fmt;
283 
284   if (!x)
285     return 0;
286 
287   code = GET_CODE (x);
288   switch (code)
289     {
290     case MEM:
291       return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
292 
293     case CONST:
294     CASE_CONST_ANY:
295     case SYMBOL_REF:
296     case LABEL_REF:
297       return 0;
298 
299     case REG:
300       /* Note that we have to test for the actual rtx used for the frame
301 	 and arg pointers and not just the register number in case we have
302 	 eliminated the frame and/or arg pointer and are using it
303 	 for pseudos.  */
304       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
305 	  /* The arg pointer varies if it is not a fixed register.  */
306 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
307 	return 0;
308       if (x == pic_offset_table_rtx
309 	  /* ??? When call-clobbered, the value is stable modulo the restore
310 	     that must happen after a call.  This currently screws up
311 	     local-alloc into believing that the restore is not needed, so we
312 	     must return 0 only if we are called from alias analysis.  */
313 	  && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
314 	return 0;
315       return 1;
316 
317     case LO_SUM:
318       /* The operand 0 of a LO_SUM is considered constant
319 	 (in fact it is related specifically to operand 1)
320 	 during alias analysis.  */
321       return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
322 	     || rtx_varies_p (XEXP (x, 1), for_alias);
323 
324     case ASM_OPERANDS:
325       if (MEM_VOLATILE_P (x))
326 	return 1;
327 
328       /* Fall through.  */
329 
330     default:
331       break;
332     }
333 
334   fmt = GET_RTX_FORMAT (code);
335   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
336     if (fmt[i] == 'e')
337       {
338 	if (rtx_varies_p (XEXP (x, i), for_alias))
339 	  return 1;
340       }
341     else if (fmt[i] == 'E')
342       {
343 	int j;
344 	for (j = 0; j < XVECLEN (x, i); j++)
345 	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
346 	    return 1;
347       }
348 
349   return 0;
350 }
351 
352 /* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
353    bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
354    UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
355    references on strict alignment machines.  */
356 
357 static int
358 rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
359 		       machine_mode mode, bool unaligned_mems)
360 {
361   enum rtx_code code = GET_CODE (x);
362 
363   /* The offset must be a multiple of the mode size if we are considering
364      unaligned memory references on strict alignment machines.  */
365   if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0)
366     {
367       HOST_WIDE_INT actual_offset = offset;
368 
369 #ifdef SPARC_STACK_BOUNDARY_HACK
370       /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
371 	     the real alignment of %sp.  However, when it does this, the
372 	     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
373       if (SPARC_STACK_BOUNDARY_HACK
374 	  && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
375 	actual_offset -= STACK_POINTER_OFFSET;
376 #endif
377 
378       if (actual_offset % GET_MODE_SIZE (mode) != 0)
379 	return 1;
380     }
381 
382   switch (code)
383     {
384     case SYMBOL_REF:
385       if (SYMBOL_REF_WEAK (x))
386 	return 1;
387       if (!CONSTANT_POOL_ADDRESS_P (x))
388 	{
389 	  tree decl;
390 	  HOST_WIDE_INT decl_size;
391 
392 	  if (offset < 0)
393 	    return 1;
394 	  if (size == 0)
395 	    size = GET_MODE_SIZE (mode);
396 	  if (size == 0)
397 	    return offset != 0;
398 
399 	  /* If the size of the access or of the symbol is unknown,
400 	     assume the worst.  */
401 	  decl = SYMBOL_REF_DECL (x);
402 
403 	  /* Else check that the access is in bounds.  TODO: restructure
404 	     expr_size/tree_expr_size/int_expr_size and just use the latter.  */
405 	  if (!decl)
406 	    decl_size = -1;
407 	  else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
408 	    decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
409 			 ? tree_to_shwi (DECL_SIZE_UNIT (decl))
410 			 : -1);
411 	  else if (TREE_CODE (decl) == STRING_CST)
412 	    decl_size = TREE_STRING_LENGTH (decl);
413 	  else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
414 	    decl_size = int_size_in_bytes (TREE_TYPE (decl));
415 	  else
416 	    decl_size = -1;
417 
418 	  return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
419         }
420 
421       return 0;
422 
423     case LABEL_REF:
424       return 0;
425 
426     case REG:
427       /* Stack references are assumed not to trap, but we need to deal with
428 	 nonsensical offsets.  */
429       if (x == frame_pointer_rtx)
430 	{
431 	  HOST_WIDE_INT adj_offset = offset - STARTING_FRAME_OFFSET;
432 	  if (size == 0)
433 	    size = GET_MODE_SIZE (mode);
434 	  if (FRAME_GROWS_DOWNWARD)
435 	    {
436 	      if (adj_offset < frame_offset || adj_offset + size - 1 >= 0)
437 		return 1;
438 	    }
439 	  else
440 	    {
441 	      if (adj_offset < 0 || adj_offset + size - 1 >= frame_offset)
442 		return 1;
443 	    }
444 	  return 0;
445 	}
446       /* ??? Need to add a similar guard for nonsensical offsets.  */
447       if (x == hard_frame_pointer_rtx
448 	  || x == stack_pointer_rtx
449 	  /* The arg pointer varies if it is not a fixed register.  */
450 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
451 	return 0;
452       /* All of the virtual frame registers are stack references.  */
453       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
454 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
455 	return 0;
456       return 1;
457 
458     case CONST:
459       return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
460 				    mode, unaligned_mems);
461 
462     case PLUS:
463       /* An address is assumed not to trap if:
464          - it is the pic register plus a constant.  */
465       if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
466 	return 0;
467 
468       /* - or it is an address that can't trap plus a constant integer.  */
469       if (CONST_INT_P (XEXP (x, 1))
470 	  && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
471 				     size, mode, unaligned_mems))
472 	return 0;
473 
474       return 1;
475 
476     case LO_SUM:
477     case PRE_MODIFY:
478       return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
479 				    mode, unaligned_mems);
480 
481     case PRE_DEC:
482     case PRE_INC:
483     case POST_DEC:
484     case POST_INC:
485     case POST_MODIFY:
486       return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
487 				    mode, unaligned_mems);
488 
489     default:
490       break;
491     }
492 
493   /* If it isn't one of the cases above, it can cause a trap.  */
494   return 1;
495 }
496 
497 /* Return nonzero if the use of X as an address in a MEM can cause a trap.  */
498 
499 int
500 rtx_addr_can_trap_p (const_rtx x)
501 {
502   return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
503 }
504 
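/* Illustrative sketch (editorial addition, not part of GCC; the helper
   name is hypothetical and the code is kept under "#if 0"): one way a
   pass could use rtx_addr_can_trap_p to decide whether a load from a MEM
   may be hoisted or executed speculatively.  A real pass would also
   check volatility and cost.  */
#if 0
static bool
sketch_mem_load_is_speculatable (const_rtx mem)
{
  /* A MEM whose address cannot trap can be read even on paths where the
     original code would not have reached it.  */
  return MEM_P (mem) && !rtx_addr_can_trap_p (XEXP (mem, 0));
}
#endif
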
505 /* Return true if X is an address that is known to not be zero.  */
506 
507 bool
508 nonzero_address_p (const_rtx x)
509 {
510   const enum rtx_code code = GET_CODE (x);
511 
512   switch (code)
513     {
514     case SYMBOL_REF:
515       return !SYMBOL_REF_WEAK (x);
516 
517     case LABEL_REF:
518       return true;
519 
520     case REG:
521       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
522       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
523 	  || x == stack_pointer_rtx
524 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
525 	return true;
526       /* All of the virtual frame registers are stack references.  */
527       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
528 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
529 	return true;
530       return false;
531 
532     case CONST:
533       return nonzero_address_p (XEXP (x, 0));
534 
535     case PLUS:
536       /* Handle PIC references.  */
537       if (XEXP (x, 0) == pic_offset_table_rtx
538 	       && CONSTANT_P (XEXP (x, 1)))
539 	return true;
540       return false;
541 
542     case PRE_MODIFY:
543       /* Similar to the above; allow positive offsets.  Further, since
544 	 auto-inc is only allowed in memories, the register must be a
545 	 pointer.  */
546       if (CONST_INT_P (XEXP (x, 1))
547 	  && INTVAL (XEXP (x, 1)) > 0)
548 	return true;
549       return nonzero_address_p (XEXP (x, 0));
550 
551     case PRE_INC:
552       /* Similarly.  Further, the offset is always positive.  */
553       return true;
554 
555     case PRE_DEC:
556     case POST_DEC:
557     case POST_INC:
558     case POST_MODIFY:
559       return nonzero_address_p (XEXP (x, 0));
560 
561     case LO_SUM:
562       return nonzero_address_p (XEXP (x, 1));
563 
564     default:
565       break;
566     }
567 
568   /* If it isn't one of the cases above, it might be zero.  */
569   return false;
570 }
571 
572 /* Return 1 if X refers to a memory location whose address
573    cannot be compared reliably with constant addresses,
574    or if X refers to a BLKmode memory object.
575    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
576    zero, we are slightly more conservative.  */
577 
578 bool
579 rtx_addr_varies_p (const_rtx x, bool for_alias)
580 {
581   enum rtx_code code;
582   int i;
583   const char *fmt;
584 
585   if (x == 0)
586     return 0;
587 
588   code = GET_CODE (x);
589   if (code == MEM)
590     return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
591 
592   fmt = GET_RTX_FORMAT (code);
593   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
594     if (fmt[i] == 'e')
595       {
596 	if (rtx_addr_varies_p (XEXP (x, i), for_alias))
597 	  return 1;
598       }
599     else if (fmt[i] == 'E')
600       {
601 	int j;
602 	for (j = 0; j < XVECLEN (x, i); j++)
603 	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
604 	    return 1;
605       }
606   return 0;
607 }
608 
609 /* Return the CALL in X if there is one.  */
610 
611 rtx
612 get_call_rtx_from (rtx x)
613 {
614   if (INSN_P (x))
615     x = PATTERN (x);
616   if (GET_CODE (x) == PARALLEL)
617     x = XVECEXP (x, 0, 0);
618   if (GET_CODE (x) == SET)
619     x = SET_SRC (x);
620   if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
621     return x;
622   return NULL_RTX;
623 }
624 
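/* Illustrative sketch (editorial addition, not part of GCC; the helper
   name is hypothetical): how get_call_rtx_from might be used to fetch
   the address being called by a CALL_INSN.  */
#if 0
static rtx
sketch_call_address (rtx insn)
{
  rtx call = get_call_rtx_from (insn);
  /* A CALL rtx wraps the called address in a MEM; strip that off.  */
  return call ? XEXP (XEXP (call, 0), 0) : NULL_RTX;
}
#endif
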
625 /* Return the value of the integer term in X, if one is apparent;
626    otherwise return 0.
627    Only obvious integer terms are detected.
628    This is used in cse.c with the `related_value' field.  */
629 
630 HOST_WIDE_INT
631 get_integer_term (const_rtx x)
632 {
633   if (GET_CODE (x) == CONST)
634     x = XEXP (x, 0);
635 
636   if (GET_CODE (x) == MINUS
637       && CONST_INT_P (XEXP (x, 1)))
638     return - INTVAL (XEXP (x, 1));
639   if (GET_CODE (x) == PLUS
640       && CONST_INT_P (XEXP (x, 1)))
641     return INTVAL (XEXP (x, 1));
642   return 0;
643 }
644 
645 /* If X is a constant, return the value sans apparent integer term;
646    otherwise return 0.
647    Only obvious integer terms are detected.  */
648 
649 rtx
650 get_related_value (const_rtx x)
651 {
652   if (GET_CODE (x) != CONST)
653     return 0;
654   x = XEXP (x, 0);
655   if (GET_CODE (x) == PLUS
656       && CONST_INT_P (XEXP (x, 1)))
657     return XEXP (x, 0);
658   else if (GET_CODE (x) == MINUS
659 	   && CONST_INT_P (XEXP (x, 1)))
660     return XEXP (x, 0);
661   return 0;
662 }
663 
664 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
665    to somewhere in the same object or object_block as SYMBOL.  */
666 
667 bool
668 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
669 {
670   tree decl;
671 
672   if (GET_CODE (symbol) != SYMBOL_REF)
673     return false;
674 
675   if (offset == 0)
676     return true;
677 
678   if (offset > 0)
679     {
680       if (CONSTANT_POOL_ADDRESS_P (symbol)
681 	  && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
682 	return true;
683 
684       decl = SYMBOL_REF_DECL (symbol);
685       if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
686 	return true;
687     }
688 
689   if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
690       && SYMBOL_REF_BLOCK (symbol)
691       && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
692       && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
693 	  < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
694     return true;
695 
696   return false;
697 }
698 
699 /* Split X into a base and a constant offset, storing them in *BASE_OUT
700    and *OFFSET_OUT respectively.  */
701 
702 void
703 split_const (rtx x, rtx *base_out, rtx *offset_out)
704 {
705   if (GET_CODE (x) == CONST)
706     {
707       x = XEXP (x, 0);
708       if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
709 	{
710 	  *base_out = XEXP (x, 0);
711 	  *offset_out = XEXP (x, 1);
712 	  return;
713 	}
714     }
715   *base_out = x;
716   *offset_out = const0_rtx;
717 }
718 
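/* Illustrative sketch (editorial addition, not part of GCC; the helper
   name is hypothetical): split_const applied to an address of the form
   (const (plus (symbol_ref X) (const_int 12))) yields the SYMBOL_REF as
   the base and (const_int 12) as the offset; anything else comes back
   unchanged with a const0_rtx offset.  */
#if 0
static void
sketch_split_const_use (rtx addr)
{
  rtx base, offset;
  split_const (addr, &base, &offset);
  /* OFFSET is always a CONST_INT here, possibly const0_rtx.  */
  gcc_assert (CONST_INT_P (offset));
}
#endif
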
719 /* Return the number of places FIND appears within X.  If COUNT_DEST is
720    zero, we do not count occurrences inside the destination of a SET.  */
721 
722 int
723 count_occurrences (const_rtx x, const_rtx find, int count_dest)
724 {
725   int i, j;
726   enum rtx_code code;
727   const char *format_ptr;
728   int count;
729 
730   if (x == find)
731     return 1;
732 
733   code = GET_CODE (x);
734 
735   switch (code)
736     {
737     case REG:
738     CASE_CONST_ANY:
739     case SYMBOL_REF:
740     case CODE_LABEL:
741     case PC:
742     case CC0:
743       return 0;
744 
745     case EXPR_LIST:
746       count = count_occurrences (XEXP (x, 0), find, count_dest);
747       if (XEXP (x, 1))
748 	count += count_occurrences (XEXP (x, 1), find, count_dest);
749       return count;
750 
751     case MEM:
752       if (MEM_P (find) && rtx_equal_p (x, find))
753 	return 1;
754       break;
755 
756     case SET:
757       if (SET_DEST (x) == find && ! count_dest)
758 	return count_occurrences (SET_SRC (x), find, count_dest);
759       break;
760 
761     default:
762       break;
763     }
764 
765   format_ptr = GET_RTX_FORMAT (code);
766   count = 0;
767 
768   for (i = 0; i < GET_RTX_LENGTH (code); i++)
769     {
770       switch (*format_ptr++)
771 	{
772 	case 'e':
773 	  count += count_occurrences (XEXP (x, i), find, count_dest);
774 	  break;
775 
776 	case 'E':
777 	  for (j = 0; j < XVECLEN (x, i); j++)
778 	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
779 	  break;
780 	}
781     }
782   return count;
783 }
784 
785 
786 /* Return TRUE if OP is a register or subreg of a register that
787    holds an unsigned quantity.  Otherwise, return FALSE.  */
788 
789 bool
790 unsigned_reg_p (rtx op)
791 {
792   if (REG_P (op)
793       && REG_EXPR (op)
794       && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
795     return true;
796 
797   if (GET_CODE (op) == SUBREG
798       && SUBREG_PROMOTED_SIGN (op))
799     return true;
800 
801   return false;
802 }
803 
804 
805 /* Nonzero if register REG appears somewhere within IN.
806    Also works if REG is not a register; in this case it checks
807    for a subexpression of IN that is Lisp "equal" to REG.  */
808 
809 int
810 reg_mentioned_p (const_rtx reg, const_rtx in)
811 {
812   const char *fmt;
813   int i;
814   enum rtx_code code;
815 
816   if (in == 0)
817     return 0;
818 
819   if (reg == in)
820     return 1;
821 
822   if (GET_CODE (in) == LABEL_REF)
823     return reg == LABEL_REF_LABEL (in);
824 
825   code = GET_CODE (in);
826 
827   switch (code)
828     {
829       /* Compare registers by number.  */
830     case REG:
831       return REG_P (reg) && REGNO (in) == REGNO (reg);
832 
833       /* These codes have no constituent expressions
834 	 and are unique.  */
835     case SCRATCH:
836     case CC0:
837     case PC:
838       return 0;
839 
840     CASE_CONST_ANY:
841       /* These are kept unique for a given value.  */
842       return 0;
843 
844     default:
845       break;
846     }
847 
848   if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
849     return 1;
850 
851   fmt = GET_RTX_FORMAT (code);
852 
853   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
854     {
855       if (fmt[i] == 'E')
856 	{
857 	  int j;
858 	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
859 	    if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
860 	      return 1;
861 	}
862       else if (fmt[i] == 'e'
863 	       && reg_mentioned_p (reg, XEXP (in, i)))
864 	return 1;
865     }
866   return 0;
867 }
868 
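/* Illustrative sketch (editorial addition, not part of GCC; the helper
   name is hypothetical): reg_mentioned_p is typically applied to a whole
   insn pattern, e.g. to ask whether an insn reads or writes a given
   register anywhere, including inside a SET_DEST.  */
#if 0
static bool
sketch_insn_mentions_reg_p (const_rtx insn, const_rtx reg)
{
  return INSN_P (insn) && reg_mentioned_p (reg, PATTERN (insn));
}
#endif
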
869 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
870    no CODE_LABEL insn.  */
871 
872 int
873 no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
874 {
875   rtx_insn *p;
876   if (beg == end)
877     return 0;
878   for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
879     if (LABEL_P (p))
880       return 0;
881   return 1;
882 }
883 
884 /* Nonzero if register REG is used in an insn between
885    FROM_INSN and TO_INSN (exclusive of those two).  */
886 
887 int
888 reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
889 		    const rtx_insn *to_insn)
890 {
891   rtx_insn *insn;
892 
893   if (from_insn == to_insn)
894     return 0;
895 
896   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
897     if (NONDEBUG_INSN_P (insn)
898 	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
899 	   || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
900       return 1;
901   return 0;
902 }
903 
904 /* Nonzero if the old value of X, a register, is referenced in BODY.  If X
905    is entirely replaced by a new value and the only use is as a SET_DEST,
906    we do not consider it a reference.  */
907 
908 int
909 reg_referenced_p (const_rtx x, const_rtx body)
910 {
911   int i;
912 
913   switch (GET_CODE (body))
914     {
915     case SET:
916       if (reg_overlap_mentioned_p (x, SET_SRC (body)))
917 	return 1;
918 
919       /* If the destination is anything other than CC0, PC, a REG or a SUBREG
920 	 of a REG that occupies all of the REG, the insn references X if
921 	 it is mentioned in the destination.  */
922       if (GET_CODE (SET_DEST (body)) != CC0
923 	  && GET_CODE (SET_DEST (body)) != PC
924 	  && !REG_P (SET_DEST (body))
925 	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
926 		&& REG_P (SUBREG_REG (SET_DEST (body)))
927 		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
928 		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
929 		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
930 			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
931 	  && reg_overlap_mentioned_p (x, SET_DEST (body)))
932 	return 1;
933       return 0;
934 
935     case ASM_OPERANDS:
936       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
937 	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
938 	  return 1;
939       return 0;
940 
941     case CALL:
942     case USE:
943     case IF_THEN_ELSE:
944       return reg_overlap_mentioned_p (x, body);
945 
946     case TRAP_IF:
947       return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
948 
949     case PREFETCH:
950       return reg_overlap_mentioned_p (x, XEXP (body, 0));
951 
952     case UNSPEC:
953     case UNSPEC_VOLATILE:
954       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
955 	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
956 	  return 1;
957       return 0;
958 
959     case PARALLEL:
960       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
961 	if (reg_referenced_p (x, XVECEXP (body, 0, i)))
962 	  return 1;
963       return 0;
964 
965     case CLOBBER:
966       if (MEM_P (XEXP (body, 0)))
967 	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
968 	  return 1;
969       return 0;
970 
971     case COND_EXEC:
972       if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
973 	return 1;
974       return reg_referenced_p (x, COND_EXEC_CODE (body));
975 
976     default:
977       return 0;
978     }
979 }
980 
981 /* Nonzero if register REG is set or clobbered in an insn between
982    FROM_INSN and TO_INSN (exclusive of those two).  */
983 
984 int
985 reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
986 		   const rtx_insn *to_insn)
987 {
988   const rtx_insn *insn;
989 
990   if (from_insn == to_insn)
991     return 0;
992 
993   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
994     if (INSN_P (insn) && reg_set_p (reg, insn))
995       return 1;
996   return 0;
997 }
998 
999 /* Nonzero if register REG is set or clobbered by INSN (the internals of reg_set_between_p).  */
1000 int
1001 reg_set_p (const_rtx reg, const_rtx insn)
1002 {
1003   /* After delay slot handling, call and branch insns might be in a
1004      sequence.  Check all the elements there.  */
1005   if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1006     {
1007       for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
1008 	if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
1009 	  return true;
1010 
1011       return false;
1012     }
1013 
1014   /* We can be passed an insn or part of one.  If we are passed an insn,
1015      check if a side-effect of the insn clobbers REG.  */
1016   if (INSN_P (insn)
1017       && (FIND_REG_INC_NOTE (insn, reg)
1018 	  || (CALL_P (insn)
1019 	      && ((REG_P (reg)
1020 		   && REGNO (reg) < FIRST_PSEUDO_REGISTER
1021 		   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
1022 					       GET_MODE (reg), REGNO (reg)))
1023 		  || MEM_P (reg)
1024 		  || find_reg_fusage (insn, CLOBBER, reg)))))
1025     return true;
1026 
1027   return set_of (reg, insn) != NULL_RTX;
1028 }
1029 
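/* Illustrative sketch (editorial addition, not part of GCC; the helper
   name is hypothetical): scanning forward for the next insn that sets or
   clobbers REG, using reg_set_p on each candidate.  */
#if 0
static rtx_insn *
sketch_next_setter (const_rtx reg, rtx_insn *from)
{
  rtx_insn *insn;
  for (insn = NEXT_INSN (from); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return insn;
  return NULL;
}
#endif
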
1030 /* Similar to reg_set_between_p, but check all registers in X.  Return 0
1031    only if none of them are modified between START and END.  Return 1 if
1032    X contains a MEM; this routine does use memory aliasing.  */
1033 
1034 int
1035 modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
1036 {
1037   const enum rtx_code code = GET_CODE (x);
1038   const char *fmt;
1039   int i, j;
1040   rtx_insn *insn;
1041 
1042   if (start == end)
1043     return 0;
1044 
1045   switch (code)
1046     {
1047     CASE_CONST_ANY:
1048     case CONST:
1049     case SYMBOL_REF:
1050     case LABEL_REF:
1051       return 0;
1052 
1053     case PC:
1054     case CC0:
1055       return 1;
1056 
1057     case MEM:
1058       if (modified_between_p (XEXP (x, 0), start, end))
1059 	return 1;
1060       if (MEM_READONLY_P (x))
1061 	return 0;
1062       for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1063 	if (memory_modified_in_insn_p (x, insn))
1064 	  return 1;
1065       return 0;
1066       break;
1067 
1068     case REG:
1069       return reg_set_between_p (x, start, end);
1070 
1071     default:
1072       break;
1073     }
1074 
1075   fmt = GET_RTX_FORMAT (code);
1076   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1077     {
1078       if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1079 	return 1;
1080 
1081       else if (fmt[i] == 'E')
1082 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1083 	  if (modified_between_p (XVECEXP (x, i, j), start, end))
1084 	    return 1;
1085     }
1086 
1087   return 0;
1088 }
1089 
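/* Illustrative sketch (editorial addition, not part of GCC; the helper
   name is hypothetical): a value computed at FROM is still valid just
   before TO when nothing it depends on is assigned in between, which is
   exactly what modified_between_p tests.  */
#if 0
static bool
sketch_value_still_valid_p (const_rtx x, const rtx_insn *from,
			    const rtx_insn *to)
{
  return !modified_between_p (x, from, to);
}
#endif
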
1090 /* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
1091    of them are modified in INSN.  Return 1 if X contains a MEM; this routine
1092    does use memory aliasing.  */
1093 
1094 int
1095 modified_in_p (const_rtx x, const_rtx insn)
1096 {
1097   const enum rtx_code code = GET_CODE (x);
1098   const char *fmt;
1099   int i, j;
1100 
1101   switch (code)
1102     {
1103     CASE_CONST_ANY:
1104     case CONST:
1105     case SYMBOL_REF:
1106     case LABEL_REF:
1107       return 0;
1108 
1109     case PC:
1110     case CC0:
1111       return 1;
1112 
1113     case MEM:
1114       if (modified_in_p (XEXP (x, 0), insn))
1115 	return 1;
1116       if (MEM_READONLY_P (x))
1117 	return 0;
1118       if (memory_modified_in_insn_p (x, insn))
1119 	return 1;
1120       return 0;
1121       break;
1122 
1123     case REG:
1124       return reg_set_p (x, insn);
1125 
1126     default:
1127       break;
1128     }
1129 
1130   fmt = GET_RTX_FORMAT (code);
1131   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1132     {
1133       if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1134 	return 1;
1135 
1136       else if (fmt[i] == 'E')
1137 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1138 	  if (modified_in_p (XVECEXP (x, i, j), insn))
1139 	    return 1;
1140     }
1141 
1142   return 0;
1143 }
1144 
1145 /* Helper function for set_of.  */
1146 struct set_of_data
1147   {
1148     const_rtx found;
1149     const_rtx pat;
1150   };
1151 
1152 static void
1153 set_of_1 (rtx x, const_rtx pat, void *data1)
1154 {
1155   struct set_of_data *const data = (struct set_of_data *) (data1);
1156   if (rtx_equal_p (x, data->pat)
1157       || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
1158     data->found = pat;
1159 }
1160 
1161 /* Given an INSN, return a SET or CLOBBER expression that modifies PAT
1162    (either directly or via STRICT_LOW_PART and similar modifiers).  */
1163 const_rtx
1164 set_of (const_rtx pat, const_rtx insn)
1165 {
1166   struct set_of_data data;
1167   data.found = NULL_RTX;
1168   data.pat = pat;
1169   note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1170   return data.found;
1171 }
1172 
1173 /* Add all hard registers in X to *PSET.  */
1174 void
1175 find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
1176 {
1177   subrtx_iterator::array_type array;
1178   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1179     {
1180       const_rtx x = *iter;
1181       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1182 	add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1183     }
1184 }
1185 
1186 /* This function, called through note_stores, collects sets and
1187    clobbers of hard registers in a HARD_REG_SET, which is pointed to
1188    by DATA.  */
1189 void
1190 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1191 {
1192   HARD_REG_SET *pset = (HARD_REG_SET *)data;
1193   if (REG_P (x) && HARD_REGISTER_P (x))
1194     add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1195 }
1196 
1197 /* Examine INSN, and compute the set of hard registers written by it.
1198    Store it in *PSET.  Should only be called after reload.  */
1199 void
1200 find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset, bool implicit)
1201 {
1202   rtx link;
1203 
1204   CLEAR_HARD_REG_SET (*pset);
1205   note_stores (PATTERN (insn), record_hard_reg_sets, pset);
1206   if (CALL_P (insn))
1207     {
1208       if (implicit)
1209 	IOR_HARD_REG_SET (*pset, call_used_reg_set);
1210 
1211       for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1212 	record_hard_reg_sets (XEXP (link, 0), NULL, pset);
1213     }
1214   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1215     if (REG_NOTE_KIND (link) == REG_INC)
1216       record_hard_reg_sets (XEXP (link, 0), NULL, pset);
1217 }
1218 
1219 /* Like record_hard_reg_sets, but called through note_uses.  */
1220 void
1221 record_hard_reg_uses (rtx *px, void *data)
1222 {
1223   find_all_hard_regs (*px, (HARD_REG_SET *) data);
1224 }
1225 
1226 /* Given an INSN, return a SET expression if this insn has only a single SET.
1227    It may also have CLOBBERs, USEs, or SETs whose output
1228    will not be used, which we ignore.  */
1229 
1230 rtx
1231 single_set_2 (const rtx_insn *insn, const_rtx pat)
1232 {
1233   rtx set = NULL;
1234   int set_verified = 1;
1235   int i;
1236 
1237   if (GET_CODE (pat) == PARALLEL)
1238     {
1239       for (i = 0; i < XVECLEN (pat, 0); i++)
1240 	{
1241 	  rtx sub = XVECEXP (pat, 0, i);
1242 	  switch (GET_CODE (sub))
1243 	    {
1244 	    case USE:
1245 	    case CLOBBER:
1246 	      break;
1247 
1248 	    case SET:
1249 	      /* We can consider insns having multiple sets, where all
1250 		 but one are dead, as single set insns.  In the common case
1251 		 only a single set is present in the pattern, so we want
1252 		 to avoid checking for REG_UNUSED notes unless necessary.
1253 
1254 		 When we reach a set for the first time, we just expect it is
1255 		 the single set we are looking for; only when more
1256 		 sets are found in the insn do we check them.  */
1257 	      if (!set_verified)
1258 		{
1259 		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1260 		      && !side_effects_p (set))
1261 		    set = NULL;
1262 		  else
1263 		    set_verified = 1;
1264 		}
1265 	      if (!set)
1266 		set = sub, set_verified = 0;
1267 	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1268 		       || side_effects_p (sub))
1269 		return NULL_RTX;
1270 	      break;
1271 
1272 	    default:
1273 	      return NULL_RTX;
1274 	    }
1275 	}
1276     }
1277   return set;
1278 }
1279 
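/* Illustrative sketch (editorial addition, not part of GCC; the helper
   name is hypothetical): callers normally reach single_set_2 through the
   single_set wrapper in rtl.h, e.g. to recognize a plain register copy.  */
#if 0
static bool
sketch_is_reg_to_reg_copy (const rtx_insn *insn)
{
  rtx set = single_set (insn);
  return set != NULL_RTX
	 && REG_P (SET_DEST (set))
	 && REG_P (SET_SRC (set));
}
#endif
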
1280 /* Given an INSN, return nonzero if it has more than one SET, else return
1281    zero.  */
1282 
1283 int
1284 multiple_sets (const_rtx insn)
1285 {
1286   int found;
1287   int i;
1288 
1289   /* INSN must be an insn.  */
1290   if (! INSN_P (insn))
1291     return 0;
1292 
1293   /* Only a PARALLEL can have multiple SETs.  */
1294   if (GET_CODE (PATTERN (insn)) == PARALLEL)
1295     {
1296       for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1297 	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1298 	  {
1299 	    /* If we have already found a SET, then return now.  */
1300 	    if (found)
1301 	      return 1;
1302 	    else
1303 	      found = 1;
1304 	  }
1305     }
1306 
1307   /* Either zero or one SET.  */
1308   return 0;
1309 }
1310 
1311 /* Return nonzero if the destination of SET equals the source
1312    and there are no side effects.  */
1313 
1314 int
1315 set_noop_p (const_rtx set)
1316 {
1317   rtx src = SET_SRC (set);
1318   rtx dst = SET_DEST (set);
1319 
1320   if (dst == pc_rtx && src == pc_rtx)
1321     return 1;
1322 
1323   if (MEM_P (dst) && MEM_P (src))
1324     return rtx_equal_p (dst, src) && !side_effects_p (dst);
1325 
1326   if (GET_CODE (dst) == ZERO_EXTRACT)
1327     return rtx_equal_p (XEXP (dst, 0), src)
1328 	   && !BITS_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1329 	   && !side_effects_p (src);
1330 
1331   if (GET_CODE (dst) == STRICT_LOW_PART)
1332     dst = XEXP (dst, 0);
1333 
1334   if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1335     {
1336       if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1337 	return 0;
1338       src = SUBREG_REG (src);
1339       dst = SUBREG_REG (dst);
1340     }
1341 
1342   /* It is a NOOP if the destination coincides with the selected
1343      source vector elements.  */
1344   if (GET_CODE (src) == VEC_SELECT
1345       && REG_P (XEXP (src, 0)) && REG_P (dst)
1346       && HARD_REGISTER_P (XEXP (src, 0))
1347       && HARD_REGISTER_P (dst))
1348     {
1349       int i;
1350       rtx par = XEXP (src, 1);
1351       rtx src0 = XEXP (src, 0);
1352       int c0 = INTVAL (XVECEXP (par, 0, 0));
1353       HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;
1354 
1355       for (i = 1; i < XVECLEN (par, 0); i++)
1356 	if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
1357 	  return 0;
1358       return
1359 	simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
1360 			       offset, GET_MODE (dst)) == (int) REGNO (dst);
1361     }
1362 
1363   return (REG_P (src) && REG_P (dst)
1364 	  && REGNO (src) == REGNO (dst));
1365 }
1366 
1367 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1368    value to itself.  */
1369 
1370 int
1371 noop_move_p (const_rtx insn)
1372 {
1373   rtx pat = PATTERN (insn);
1374 
1375   if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1376     return 1;
1377 
1378   /* Insns carrying these notes are useful later on.  */
1379   if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1380     return 0;
1381 
1382   /* Check the code to be executed for COND_EXEC.  */
1383   if (GET_CODE (pat) == COND_EXEC)
1384     pat = COND_EXEC_CODE (pat);
1385 
1386   if (GET_CODE (pat) == SET && set_noop_p (pat))
1387     return 1;
1388 
1389   if (GET_CODE (pat) == PARALLEL)
1390     {
1391       int i;
1392       /* If nothing but SETs of registers to themselves,
1393 	 this insn can also be deleted.  */
1394       for (i = 0; i < XVECLEN (pat, 0); i++)
1395 	{
1396 	  rtx tem = XVECEXP (pat, 0, i);
1397 
1398 	  if (GET_CODE (tem) == USE
1399 	      || GET_CODE (tem) == CLOBBER)
1400 	    continue;
1401 
1402 	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1403 	    return 0;
1404 	}
1405 
1406       return 1;
1407     }
1408   return 0;
1409 }
1410 
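/* Illustrative sketch (editorial addition, not part of GCC; the helper
   name is hypothetical): counting the no-op moves in an insn stream,
   the kind of insns a cleanup pass would delete.  */
#if 0
static int
sketch_count_noop_moves (rtx_insn *first)
{
  int count = 0;
  for (rtx_insn *insn = first; insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn) && noop_move_p (insn))
      count++;
  return count;
}
#endif
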
1411 
1412 /* Return nonzero if register in range [REGNO, ENDREGNO)
1413    appears either explicitly or implicitly in X
1414    other than being stored into.
1415 
1416    References contained within the substructure at LOC do not count.
1417    LOC may be zero, meaning don't ignore anything.  */
1418 
1419 bool
1420 refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1421 		   rtx *loc)
1422 {
1423   int i;
1424   unsigned int x_regno;
1425   RTX_CODE code;
1426   const char *fmt;
1427 
1428  repeat:
1429   /* The contents of a REG_NONNEG note are always zero, so we must come here
1430      upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
1431   if (x == 0)
1432     return false;
1433 
1434   code = GET_CODE (x);
1435 
1436   switch (code)
1437     {
1438     case REG:
1439       x_regno = REGNO (x);
1440 
1441       /* If we are modifying the stack, frame, or argument pointer, it will
1442 	 clobber a virtual register.  In fact, we could be more precise,
1443 	 but it isn't worth it.  */
1444       if ((x_regno == STACK_POINTER_REGNUM
1445 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1446 	   || x_regno == ARG_POINTER_REGNUM
1447 #endif
1448 	   || x_regno == FRAME_POINTER_REGNUM)
1449 	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1450 	return true;
1451 
1452       return endregno > x_regno && regno < END_REGNO (x);
1453 
1454     case SUBREG:
1455       /* If this is a SUBREG of a hard reg, we can see exactly which
1456 	 registers are being modified.  Otherwise, handle normally.  */
1457       if (REG_P (SUBREG_REG (x))
1458 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1459 	{
1460 	  unsigned int inner_regno = subreg_regno (x);
1461 	  unsigned int inner_endregno
1462 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1463 			     ? subreg_nregs (x) : 1);
1464 
1465 	  return endregno > inner_regno && regno < inner_endregno;
1466 	}
1467       break;
1468 
1469     case CLOBBER:
1470     case SET:
1471       if (&SET_DEST (x) != loc
1472 	  /* Note setting a SUBREG counts as referring to the REG it is in for
1473 	     a pseudo but not for hard registers since we can
1474 	     treat each word individually.  */
1475 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
1476 	       && loc != &SUBREG_REG (SET_DEST (x))
1477 	       && REG_P (SUBREG_REG (SET_DEST (x)))
1478 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1479 	       && refers_to_regno_p (regno, endregno,
1480 				     SUBREG_REG (SET_DEST (x)), loc))
1481 	      || (!REG_P (SET_DEST (x))
1482 		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1483 	return true;
1484 
1485       if (code == CLOBBER || loc == &SET_SRC (x))
1486 	return false;
1487       x = SET_SRC (x);
1488       goto repeat;
1489 
1490     default:
1491       break;
1492     }
1493 
1494   /* X does not match, so try its subexpressions.  */
1495 
1496   fmt = GET_RTX_FORMAT (code);
1497   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1498     {
1499       if (fmt[i] == 'e' && loc != &XEXP (x, i))
1500 	{
1501 	  if (i == 0)
1502 	    {
1503 	      x = XEXP (x, 0);
1504 	      goto repeat;
1505 	    }
1506 	  else
1507 	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1508 	      return true;
1509 	}
1510       else if (fmt[i] == 'E')
1511 	{
1512 	  int j;
1513 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1514 	    if (loc != &XVECEXP (x, i, j)
1515 		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1516 	      return true;
1517 	}
1518     }
1519   return false;
1520 }
1521 
1522 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1523    we check if any register number in X conflicts with the relevant register
1524    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1525    contains a MEM (we don't bother checking for memory addresses that can't
1526    conflict because we expect this to be a rare case).  */
1527 
1528 int
1529 reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1530 {
1531   unsigned int regno, endregno;
1532 
1533   /* If either argument is a constant, then modifying X cannot
1534      affect IN.  Here we look only at IN; we can profitably combine
1535      CONSTANT_P (x) with the switch statement below.  */
1536   if (CONSTANT_P (in))
1537     return 0;
1538 
1539  recurse:
1540   switch (GET_CODE (x))
1541     {
1542     case STRICT_LOW_PART:
1543     case ZERO_EXTRACT:
1544     case SIGN_EXTRACT:
1545       /* Overly conservative.  */
1546       x = XEXP (x, 0);
1547       goto recurse;
1548 
1549     case SUBREG:
1550       regno = REGNO (SUBREG_REG (x));
1551       if (regno < FIRST_PSEUDO_REGISTER)
1552 	regno = subreg_regno (x);
1553       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1554 			  ? subreg_nregs (x) : 1);
1555       goto do_reg;
1556 
1557     case REG:
1558       regno = REGNO (x);
1559       endregno = END_REGNO (x);
1560     do_reg:
1561       return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1562 
1563     case MEM:
1564       {
1565 	const char *fmt;
1566 	int i;
1567 
1568 	if (MEM_P (in))
1569 	  return 1;
1570 
1571 	fmt = GET_RTX_FORMAT (GET_CODE (in));
1572 	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1573 	  if (fmt[i] == 'e')
1574 	    {
1575 	      if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1576 		return 1;
1577 	    }
1578 	  else if (fmt[i] == 'E')
1579 	    {
1580 	      int j;
1581 	      for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1582 		if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1583 		  return 1;
1584 	    }
1585 
1586 	return 0;
1587       }
1588 
1589     case SCRATCH:
1590     case PC:
1591     case CC0:
1592       return reg_mentioned_p (x, in);
1593 
1594     case PARALLEL:
1595       {
1596 	int i;
1597 
1598 	/* If any register in here refers to it we return true.  */
1599 	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1600 	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
1601 	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1602 	    return 1;
1603 	return 0;
1604       }
1605 
1606     default:
1607       gcc_assert (CONSTANT_P (x));
1608       return 0;
1609     }
1610 }
1611 
1612 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1613    (X would be the pattern of an insn).  DATA is an arbitrary pointer,
1614    ignored by note_stores, but passed to FUN.
1615 
1616    FUN receives three arguments:
1617    1. the REG, MEM, CC0 or PC being stored in or clobbered,
1618    2. the SET or CLOBBER rtx that does the store,
1619    3. the pointer DATA provided to note_stores.
1620 
1621   If the item being stored in or clobbered is a SUBREG of a hard register,
1622   the SUBREG will be passed.  */
1623 
1624 void
1625 note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1626 {
1627   int i;
1628 
1629   if (GET_CODE (x) == COND_EXEC)
1630     x = COND_EXEC_CODE (x);
1631 
1632   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1633     {
1634       rtx dest = SET_DEST (x);
1635 
1636       while ((GET_CODE (dest) == SUBREG
1637 	      && (!REG_P (SUBREG_REG (dest))
1638 		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1639 	     || GET_CODE (dest) == ZERO_EXTRACT
1640 	     || GET_CODE (dest) == STRICT_LOW_PART)
1641 	dest = XEXP (dest, 0);
1642 
1643       /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1644 	 each of whose first operand is a register.  */
1645       if (GET_CODE (dest) == PARALLEL)
1646 	{
1647 	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1648 	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1649 	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1650 	}
1651       else
1652 	(*fun) (dest, x, data);
1653     }
1654 
1655   else if (GET_CODE (x) == PARALLEL)
1656     for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1657       note_stores (XVECEXP (x, 0, i), fun, data);
1658 }
1659 
1660 /* Like note_stores, but call FUN for each expression that is being
1661    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1662    FUN for each expression, not any interior subexpressions.  FUN receives a
1663    pointer to the expression and the DATA passed to this function.
1664 
1665    Note that this is not quite the same test as that done in reg_referenced_p
1666    since that considers something as being referenced if it is being
1667    partially set, while we do not.  */
1668 
1669 void
1670 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1671 {
1672   rtx body = *pbody;
1673   int i;
1674 
1675   switch (GET_CODE (body))
1676     {
1677     case COND_EXEC:
1678       (*fun) (&COND_EXEC_TEST (body), data);
1679       note_uses (&COND_EXEC_CODE (body), fun, data);
1680       return;
1681 
1682     case PARALLEL:
1683       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1684 	note_uses (&XVECEXP (body, 0, i), fun, data);
1685       return;
1686 
1687     case SEQUENCE:
1688       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1689 	note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1690       return;
1691 
1692     case USE:
1693       (*fun) (&XEXP (body, 0), data);
1694       return;
1695 
1696     case ASM_OPERANDS:
1697       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1698 	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1699       return;
1700 
1701     case TRAP_IF:
1702       (*fun) (&TRAP_CONDITION (body), data);
1703       return;
1704 
1705     case PREFETCH:
1706       (*fun) (&XEXP (body, 0), data);
1707       return;
1708 
1709     case UNSPEC:
1710     case UNSPEC_VOLATILE:
1711       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1712 	(*fun) (&XVECEXP (body, 0, i), data);
1713       return;
1714 
1715     case CLOBBER:
1716       if (MEM_P (XEXP (body, 0)))
1717 	(*fun) (&XEXP (XEXP (body, 0), 0), data);
1718       return;
1719 
1720     case SET:
1721       {
1722 	rtx dest = SET_DEST (body);
1723 
1724 	/* For SETs we process everything in the source, plus registers in any
1725 	   memory expression in the destination and the operands of a ZERO_EXTRACT.  */
1726 	(*fun) (&SET_SRC (body), data);
1727 
1728 	if (GET_CODE (dest) == ZERO_EXTRACT)
1729 	  {
1730 	    (*fun) (&XEXP (dest, 1), data);
1731 	    (*fun) (&XEXP (dest, 2), data);
1732 	  }
1733 
1734 	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1735 	  dest = XEXP (dest, 0);
1736 
1737 	if (MEM_P (dest))
1738 	  (*fun) (&XEXP (dest, 0), data);
1739       }
1740       return;
1741 
1742     default:
1743       /* All the other possibilities never store.  */
1744       (*fun) (pbody, data);
1745       return;
1746     }
1747 }
1748 
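/* Illustrative sketch (editorial addition, not part of GCC; the helper
   names are hypothetical): a typical note_uses callback receives a
   pointer to each used expression, so it can inspect (or replace) it.
   Here it merely records whether any use mentions the stack pointer.  */
#if 0
static void
sketch_note_sp_use (rtx *px, void *data)
{
  if (reg_mentioned_p (stack_pointer_rtx, *px))
    *(bool *) data = true;
}

static bool
sketch_insn_uses_sp_p (rtx_insn *insn)
{
  bool found = false;
  note_uses (&PATTERN (insn), sketch_note_sp_use, &found);
  return found;
}
#endif
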
1749 /* Return nonzero if X's old contents don't survive after INSN.
1750    This will be true if X is (cc0) or if X is a register and
1751    X dies in INSN or because INSN entirely sets X.
1752 
1753    "Entirely set" means set directly and not through a SUBREG or
1754    ZERO_EXTRACT, so no trace of the old contents remains.
1755    Likewise, REG_INC does not count.
1756 
1757    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1758    but for this use that makes no difference, since regs don't overlap
1759    during their lifetimes.  Therefore, this function may be used
1760    at any time after deaths have been computed.
1761 
1762    If REG is a hard reg that occupies multiple machine registers, this
1763    function will only return 1 if each of those registers will be replaced
1764    by INSN.  */
1765 
1766 int
1767 dead_or_set_p (const_rtx insn, const_rtx x)
1768 {
1769   unsigned int regno, end_regno;
1770   unsigned int i;
1771 
1772   /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1773   if (GET_CODE (x) == CC0)
1774     return 1;
1775 
1776   gcc_assert (REG_P (x));
1777 
1778   regno = REGNO (x);
1779   end_regno = END_REGNO (x);
1780   for (i = regno; i < end_regno; i++)
1781     if (! dead_or_set_regno_p (insn, i))
1782       return 0;
1783 
1784   return 1;
1785 }
1786 
1787 /* Return TRUE iff DEST is a register or subreg of a register and
1788    doesn't change the number of words of the inner register, and any
1789    part of the register is TEST_REGNO.  */
1790 
1791 static bool
1792 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1793 {
1794   unsigned int regno, endregno;
1795 
1796   if (GET_CODE (dest) == SUBREG
1797       && (((GET_MODE_SIZE (GET_MODE (dest))
1798 	    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1799 	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1800 	       + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1801     dest = SUBREG_REG (dest);
1802 
1803   if (!REG_P (dest))
1804     return false;
1805 
1806   regno = REGNO (dest);
1807   endregno = END_REGNO (dest);
1808   return (test_regno >= regno && test_regno < endregno);
1809 }
1810 
1811 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1812    any member matches the covers_regno_no_parallel_p criteria.  */
1813 
1814 static bool
1815 covers_regno_p (const_rtx dest, unsigned int test_regno)
1816 {
1817   if (GET_CODE (dest) == PARALLEL)
1818     {
1819       /* Some targets place small structures in registers for return
1820 	 values of functions, and those registers are wrapped in
1821 	 PARALLELs that we may see as the destination of a SET.  */
1822       int i;
1823 
1824       for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1825 	{
1826 	  rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1827 	  if (inner != NULL_RTX
1828 	      && covers_regno_no_parallel_p (inner, test_regno))
1829 	    return true;
1830 	}
1831 
1832       return false;
1833     }
1834   else
1835     return covers_regno_no_parallel_p (dest, test_regno);
1836 }
1837 
1838 /* Utility function for dead_or_set_p to check an individual register. */
1839 
1840 int
1841 dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1842 {
1843   const_rtx pattern;
1844 
1845   /* See if there is a death note for something that includes TEST_REGNO.  */
1846   if (find_regno_note (insn, REG_DEAD, test_regno))
1847     return 1;
1848 
1849   if (CALL_P (insn)
1850       && find_regno_fusage (insn, CLOBBER, test_regno))
1851     return 1;
1852 
1853   pattern = PATTERN (insn);
1854 
1855   /* If a COND_EXEC is not executed, the value survives.  */
1856   if (GET_CODE (pattern) == COND_EXEC)
1857     return 0;
1858 
1859   if (GET_CODE (pattern) == SET)
1860     return covers_regno_p (SET_DEST (pattern), test_regno);
1861   else if (GET_CODE (pattern) == PARALLEL)
1862     {
1863       int i;
1864 
1865       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1866 	{
1867 	  rtx body = XVECEXP (pattern, 0, i);
1868 
1869 	  if (GET_CODE (body) == COND_EXEC)
1870 	    body = COND_EXEC_CODE (body);
1871 
1872 	  if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1873 	      && covers_regno_p (SET_DEST (body), test_regno))
1874 	    return 1;
1875 	}
1876     }
1877 
1878   return 0;
1879 }
1880 
1881 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1882    If DATUM is nonzero, look for one whose datum is DATUM.  */
1883 
1884 rtx
1885 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1886 {
1887   rtx link;
1888 
1889   gcc_checking_assert (insn);
1890 
1891   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1892   if (! INSN_P (insn))
1893     return 0;
1894   if (datum == 0)
1895     {
1896       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1897 	if (REG_NOTE_KIND (link) == kind)
1898 	  return link;
1899       return 0;
1900     }
1901 
1902   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1903     if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1904       return link;
1905   return 0;
1906 }
1907 
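/* Editorial usage sketch (not part of the original source): fetch a
   REG_EQUAL note, if any, and inspect its datum.  Passing NULL_RTX as
   DATUM matches the first note of the requested kind:

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     rtx value = note ? XEXP (note, 0) : NULL_RTX;

   where INSN is whatever insn the caller is examining.  */
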
1908 /* Return the reg-note of kind KIND in insn INSN which applies to register
1909    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1910    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1911    it might be the case that the note overlaps REGNO.  */
1912 
1913 rtx
1914 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1915 {
1916   rtx link;
1917 
1918   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1919   if (! INSN_P (insn))
1920     return 0;
1921 
1922   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1923     if (REG_NOTE_KIND (link) == kind
1924 	/* Verify that it is a register, so that scratch and MEM won't cause a
1925 	   problem here.  */
1926 	&& REG_P (XEXP (link, 0))
1927 	&& REGNO (XEXP (link, 0)) <= regno
1928 	&& END_REGNO (XEXP (link, 0)) > regno)
1929       return link;
1930   return 0;
1931 }
1932 
1933 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1934    has such a note.  */
1935 
1936 rtx
1937 find_reg_equal_equiv_note (const_rtx insn)
1938 {
1939   rtx link;
1940 
1941   if (!INSN_P (insn))
1942     return 0;
1943 
1944   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1945     if (REG_NOTE_KIND (link) == REG_EQUAL
1946 	|| REG_NOTE_KIND (link) == REG_EQUIV)
1947       {
1948 	/* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1949 	   insns that have multiple sets.  Checking single_set to
1950 	   make sure of this is not the proper check, as explained
1951 	   in the comment in set_unique_reg_note.
1952 
1953 	   This should be changed into an assert.  */
1954 	if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1955 	  return 0;
1956 	return link;
1957       }
1958   return NULL;
1959 }
1960 
1961 /* Check whether INSN is a single_set whose source is known to be
1962    equivalent to a constant.  Return that constant if so, otherwise
1963    return null.  */
1964 
1965 rtx
1966 find_constant_src (const rtx_insn *insn)
1967 {
1968   rtx note, set, x;
1969 
1970   set = single_set (insn);
1971   if (set)
1972     {
1973       x = avoid_constant_pool_reference (SET_SRC (set));
1974       if (CONSTANT_P (x))
1975 	return x;
1976     }
1977 
1978   note = find_reg_equal_equiv_note (insn);
1979   if (note && CONSTANT_P (XEXP (note, 0)))
1980     return XEXP (note, 0);
1981 
1982   return NULL_RTX;
1983 }
1984 
1985 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1986    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1987 
1988 int
1989 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1990 {
1991   /* If it's not a CALL_INSN, it can't possibly have a
1992      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1993   if (!CALL_P (insn))
1994     return 0;
1995 
1996   gcc_assert (datum);
1997 
1998   if (!REG_P (datum))
1999     {
2000       rtx link;
2001 
2002       for (link = CALL_INSN_FUNCTION_USAGE (insn);
2003 	   link;
2004 	   link = XEXP (link, 1))
2005 	if (GET_CODE (XEXP (link, 0)) == code
2006 	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2007 	  return 1;
2008     }
2009   else
2010     {
2011       unsigned int regno = REGNO (datum);
2012 
2013       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2014 	 to pseudo registers, so don't bother checking.  */
2015 
2016       if (regno < FIRST_PSEUDO_REGISTER)
2017 	{
2018 	  unsigned int end_regno = END_HARD_REGNO (datum);
2019 	  unsigned int i;
2020 
2021 	  for (i = regno; i < end_regno; i++)
2022 	    if (find_regno_fusage (insn, code, i))
2023 	      return 1;
2024 	}
2025     }
2026 
2027   return 0;
2028 }
2029 
2030 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2031    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
2032 
2033 int
2034 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
2035 {
2036   rtx link;
2037 
2038   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2039      to pseudo registers, so don't bother checking.  */
2040 
2041   if (regno >= FIRST_PSEUDO_REGISTER
2042       || !CALL_P (insn) )
2043     return 0;
2044 
2045   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2046     {
2047       rtx op, reg;
2048 
2049       if (GET_CODE (op = XEXP (link, 0)) == code
2050 	  && REG_P (reg = XEXP (op, 0))
2051 	  && REGNO (reg) <= regno
2052 	  && END_HARD_REGNO (reg) > regno)
2053 	return 1;
2054     }
2055 
2056   return 0;
2057 }
2058 
2059 
2060 /* Return true if KIND is an integer REG_NOTE.  */
2061 
2062 static bool
2063 int_reg_note_p (enum reg_note kind)
2064 {
2065   return kind == REG_BR_PROB;
2066 }
2067 
2068 /* Allocate a register note with kind KIND and datum DATUM.  LIST is
2069    stored as the pointer to the next register note.  */
2070 
2071 rtx
2072 alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
2073 {
2074   rtx note;
2075 
2076   gcc_checking_assert (!int_reg_note_p (kind));
2077   switch (kind)
2078     {
2079     case REG_CC_SETTER:
2080     case REG_CC_USER:
2081     case REG_LABEL_TARGET:
2082     case REG_LABEL_OPERAND:
2083     case REG_TM:
2084       /* These types of register notes use an INSN_LIST rather than an
2085 	 EXPR_LIST, so that copying is done right and dumps look
2086 	 better.  */
2087       note = alloc_INSN_LIST (datum, list);
2088       PUT_REG_NOTE_KIND (note, kind);
2089       break;
2090 
2091     default:
2092       note = alloc_EXPR_LIST (kind, datum, list);
2093       break;
2094     }
2095 
2096   return note;
2097 }
2098 
2099 /* Add register note with kind KIND and datum DATUM to INSN.  */
2100 
2101 void
2102 add_reg_note (rtx insn, enum reg_note kind, rtx datum)
2103 {
2104   REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
2105 }
2106 
2107 /* Add an integer register note with kind KIND and datum DATUM to INSN.  */
2108 
2109 void
2110 add_int_reg_note (rtx insn, enum reg_note kind, int datum)
2111 {
2112   gcc_checking_assert (int_reg_note_p (kind));
2113   REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
2114 				       datum, REG_NOTES (insn));
2115 }
2116 
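/* Editorial usage sketch (not part of the original source): most note
   kinds carry an rtx datum and go through add_reg_note, while the
   integer-valued kinds accepted by int_reg_note_p use add_int_reg_note:

     add_reg_note (insn, REG_EQUAL, copy_rtx (value));
     add_int_reg_note (jump_insn, REG_BR_PROB, probability);

   Here "value", "jump_insn" and "probability" stand for caller-supplied
   data and are only illustrative.  */
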
2117 /* Add a register note like NOTE to INSN.  */
2118 
2119 void
2120 add_shallow_copy_of_reg_note (rtx insn, rtx note)
2121 {
2122   if (GET_CODE (note) == INT_LIST)
2123     add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
2124   else
2125     add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2126 }
2127 
2128 /* Remove register note NOTE from the REG_NOTES of INSN.  */
2129 
2130 void
2131 remove_note (rtx insn, const_rtx note)
2132 {
2133   rtx link;
2134 
2135   if (note == NULL_RTX)
2136     return;
2137 
2138   if (REG_NOTES (insn) == note)
2139     REG_NOTES (insn) = XEXP (note, 1);
2140   else
2141     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2142       if (XEXP (link, 1) == note)
2143 	{
2144 	  XEXP (link, 1) = XEXP (note, 1);
2145 	  break;
2146 	}
2147 
2148   switch (REG_NOTE_KIND (note))
2149     {
2150     case REG_EQUAL:
2151     case REG_EQUIV:
2152       df_notes_rescan (as_a <rtx_insn *> (insn));
2153       break;
2154     default:
2155       break;
2156     }
2157 }
2158 
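/* Editorial usage sketch (not part of the original source): find-then-
   remove is the usual pattern, and remove_note ignores a null NOTE, so
   the lookup result can be passed straight through:

     remove_note (insn, find_reg_note (insn, REG_UNUSED, NULL_RTX));

   The REG_EQUAL/REG_EQUIV kinds additionally trigger a df rescan.  */
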
2159 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */
2160 
2161 void
2162 remove_reg_equal_equiv_notes (rtx insn)
2163 {
2164   rtx *loc;
2165 
2166   loc = &REG_NOTES (insn);
2167   while (*loc)
2168     {
2169       enum reg_note kind = REG_NOTE_KIND (*loc);
2170       if (kind == REG_EQUAL || kind == REG_EQUIV)
2171 	*loc = XEXP (*loc, 1);
2172       else
2173 	loc = &XEXP (*loc, 1);
2174     }
2175 }
2176 
2177 /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.  */
2178 
2179 void
2180 remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
2181 {
2182   df_ref eq_use;
2183 
2184   if (!df)
2185     return;
2186 
2187   /* This loop is a little tricky.  We cannot just go down the chain because
2188      it is being modified by some actions in the loop.  So we just iterate
2189      over the head.  We plan to drain the list anyway.  */
2190   while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2191     {
2192       rtx_insn *insn = DF_REF_INSN (eq_use);
2193       rtx note = find_reg_equal_equiv_note (insn);
2194 
2195       /* This assert is generally triggered when someone deletes a REG_EQUAL
2196 	 or REG_EQUIV note by hacking the list manually rather than calling
2197 	 remove_note.  */
2198       gcc_assert (note);
2199 
2200       remove_note (insn, note);
2201     }
2202 }
2203 
2204 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2205    return 1 if it is found.  A simple equality test is used to determine if
2206    NODE matches.  */
2207 
2208 int
2209 in_expr_list_p (const_rtx listp, const_rtx node)
2210 {
2211   const_rtx x;
2212 
2213   for (x = listp; x; x = XEXP (x, 1))
2214     if (node == XEXP (x, 0))
2215       return 1;
2216 
2217   return 0;
2218 }
2219 
2220 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2221    remove that entry from the list if it is found.
2222 
2223    A simple equality test is used to determine if NODE matches.  */
2224 
2225 void
2226 remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp)
2227 {
2228   rtx_expr_list *temp = *listp;
2229   rtx prev = NULL_RTX;
2230 
2231   while (temp)
2232     {
2233       if (node == temp->element ())
2234 	{
2235 	  /* Splice the node out of the list.  */
2236 	  if (prev)
2237 	    XEXP (prev, 1) = temp->next ();
2238 	  else
2239 	    *listp = temp->next ();
2240 
2241 	  return;
2242 	}
2243 
2244       prev = temp;
2245       temp = temp->next ();
2246     }
2247 }
2248 
2249 /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
2250    remove that entry from the list if it is found.
2251 
2252    A simple equality test is used to determine if NODE matches.  */
2253 
2254 void
2255 remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
2256 {
2257   rtx_insn_list *temp = *listp;
2258   rtx prev = NULL;
2259 
2260   while (temp)
2261     {
2262       if (node == temp->insn ())
2263 	{
2264 	  /* Splice the node out of the list.  */
2265 	  if (prev)
2266 	    XEXP (prev, 1) = temp->next ();
2267 	  else
2268 	    *listp = temp->next ();
2269 
2270 	  return;
2271 	}
2272 
2273       prev = temp;
2274       temp = temp->next ();
2275     }
2276 }
2277 
2278 /* Nonzero if X contains any volatile instructions.  These are instructions
2279    which may cause unpredictable machine state, and thus no instructions
2280    or register uses should be moved or combined across them.
2281    This includes only volatile asms and UNSPEC_VOLATILE instructions.  */
2282 
2283 int
2284 volatile_insn_p (const_rtx x)
2285 {
2286   const RTX_CODE code = GET_CODE (x);
2287   switch (code)
2288     {
2289     case LABEL_REF:
2290     case SYMBOL_REF:
2291     case CONST:
2292     CASE_CONST_ANY:
2293     case CC0:
2294     case PC:
2295     case REG:
2296     case SCRATCH:
2297     case CLOBBER:
2298     case ADDR_VEC:
2299     case ADDR_DIFF_VEC:
2300     case CALL:
2301     case MEM:
2302       return 0;
2303 
2304     case UNSPEC_VOLATILE:
2305       return 1;
2306 
2307     case ASM_INPUT:
2308     case ASM_OPERANDS:
2309       if (MEM_VOLATILE_P (x))
2310 	return 1;
2311 
2312     default:
2313       break;
2314     }
2315 
2316   /* Recursively scan the operands of this expression.  */
2317 
2318   {
2319     const char *const fmt = GET_RTX_FORMAT (code);
2320     int i;
2321 
2322     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2323       {
2324 	if (fmt[i] == 'e')
2325 	  {
2326 	    if (volatile_insn_p (XEXP (x, i)))
2327 	      return 1;
2328 	  }
2329 	else if (fmt[i] == 'E')
2330 	  {
2331 	    int j;
2332 	    for (j = 0; j < XVECLEN (x, i); j++)
2333 	      if (volatile_insn_p (XVECEXP (x, i, j)))
2334 		return 1;
2335 	  }
2336       }
2337   }
2338   return 0;
2339 }
2340 
2341 /* Nonzero if X contains any volatile memory references,
2342    UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */
2343 
2344 int
2345 volatile_refs_p (const_rtx x)
2346 {
2347   const RTX_CODE code = GET_CODE (x);
2348   switch (code)
2349     {
2350     case LABEL_REF:
2351     case SYMBOL_REF:
2352     case CONST:
2353     CASE_CONST_ANY:
2354     case CC0:
2355     case PC:
2356     case REG:
2357     case SCRATCH:
2358     case CLOBBER:
2359     case ADDR_VEC:
2360     case ADDR_DIFF_VEC:
2361       return 0;
2362 
2363     case UNSPEC_VOLATILE:
2364       return 1;
2365 
2366     case MEM:
2367     case ASM_INPUT:
2368     case ASM_OPERANDS:
2369       if (MEM_VOLATILE_P (x))
2370 	return 1;
2371 
2372     default:
2373       break;
2374     }
2375 
2376   /* Recursively scan the operands of this expression.  */
2377 
2378   {
2379     const char *const fmt = GET_RTX_FORMAT (code);
2380     int i;
2381 
2382     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2383       {
2384 	if (fmt[i] == 'e')
2385 	  {
2386 	    if (volatile_refs_p (XEXP (x, i)))
2387 	      return 1;
2388 	  }
2389 	else if (fmt[i] == 'E')
2390 	  {
2391 	    int j;
2392 	    for (j = 0; j < XVECLEN (x, i); j++)
2393 	      if (volatile_refs_p (XVECEXP (x, i, j)))
2394 		return 1;
2395 	  }
2396       }
2397   }
2398   return 0;
2399 }
2400 
2401 /* Similar to above, except that it also rejects register pre- and post-
2402    increment and decrement, as well as calls and non-VOIDmode CLOBBERs.  */
2403 
2404 int
2405 side_effects_p (const_rtx x)
2406 {
2407   const RTX_CODE code = GET_CODE (x);
2408   switch (code)
2409     {
2410     case LABEL_REF:
2411     case SYMBOL_REF:
2412     case CONST:
2413     CASE_CONST_ANY:
2414     case CC0:
2415     case PC:
2416     case REG:
2417     case SCRATCH:
2418     case ADDR_VEC:
2419     case ADDR_DIFF_VEC:
2420     case VAR_LOCATION:
2421       return 0;
2422 
2423     case CLOBBER:
2424       /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
2425 	 when some combination can't be done.  If we see one, don't think
2426 	 that we can simplify the expression.  */
2427       return (GET_MODE (x) != VOIDmode);
2428 
2429     case PRE_INC:
2430     case PRE_DEC:
2431     case POST_INC:
2432     case POST_DEC:
2433     case PRE_MODIFY:
2434     case POST_MODIFY:
2435     case CALL:
2436     case UNSPEC_VOLATILE:
2437       return 1;
2438 
2439     case MEM:
2440     case ASM_INPUT:
2441     case ASM_OPERANDS:
2442       if (MEM_VOLATILE_P (x))
2443 	return 1;
2444 
2445     default:
2446       break;
2447     }
2448 
2449   /* Recursively scan the operands of this expression.  */
2450 
2451   {
2452     const char *fmt = GET_RTX_FORMAT (code);
2453     int i;
2454 
2455     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2456       {
2457 	if (fmt[i] == 'e')
2458 	  {
2459 	    if (side_effects_p (XEXP (x, i)))
2460 	      return 1;
2461 	  }
2462 	else if (fmt[i] == 'E')
2463 	  {
2464 	    int j;
2465 	    for (j = 0; j < XVECLEN (x, i); j++)
2466 	      if (side_effects_p (XVECEXP (x, i, j)))
2467 		return 1;
2468 	  }
2469       }
2470   }
2471   return 0;
2472 }
2473 
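/* Editorial summary (not part of the original source) of how the three
   predicates above differ, with illustrative operands.  For a volatile
   memory reference (mem/v:SI (reg:SI r)): volatile_insn_p is 0, while
   volatile_refs_p and side_effects_p are both 1.  For an auto-increment
   such as (post_inc:SI (reg:SI r)): only side_effects_p is 1.  For an
   (unspec_volatile ...): all three return 1.  */
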
2474 /* Return nonzero if evaluating rtx X might cause a trap.
2475    FLAGS controls how to consider MEMs.  A nonzero value means the
2476    context of the access may have changed from the original, such that
2477    the address may have become invalid.  */
2478 
2479 int
2480 may_trap_p_1 (const_rtx x, unsigned flags)
2481 {
2482   int i;
2483   enum rtx_code code;
2484   const char *fmt;
2485 
2486   /* We make no distinction currently, but this function is part of
2487      the internal target-hooks ABI so we keep the parameter as
2488      "unsigned flags".  */
2489   bool code_changed = flags != 0;
2490 
2491   if (x == 0)
2492     return 0;
2493   code = GET_CODE (x);
2494   switch (code)
2495     {
2496       /* Handle these cases quickly.  */
2497     CASE_CONST_ANY:
2498     case SYMBOL_REF:
2499     case LABEL_REF:
2500     case CONST:
2501     case PC:
2502     case CC0:
2503     case REG:
2504     case SCRATCH:
2505       return 0;
2506 
2507     case UNSPEC:
2508       return targetm.unspec_may_trap_p (x, flags);
2509 
2510     case UNSPEC_VOLATILE:
2511     case ASM_INPUT:
2512     case TRAP_IF:
2513       return 1;
2514 
2515     case ASM_OPERANDS:
2516       return MEM_VOLATILE_P (x);
2517 
2518       /* Memory ref can trap unless it's a static var or a stack slot.  */
2519     case MEM:
2520       /* Recognize specific pattern of stack checking probes.  */
2521       if (flag_stack_check
2522 	  && MEM_VOLATILE_P (x)
2523 	  && XEXP (x, 0) == stack_pointer_rtx)
2524 	return 1;
2525       if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2526 	     reference; moving it out of context such as when moving code
2527 	     when optimizing, might cause its address to become invalid.  */
2528 	  code_changed
2529 	  || !MEM_NOTRAP_P (x))
2530 	{
2531 	  HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
2532 	  return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2533 					GET_MODE (x), code_changed);
2534 	}
2535 
2536       return 0;
2537 
2538       /* Division by a non-constant might trap.  */
2539     case DIV:
2540     case MOD:
2541     case UDIV:
2542     case UMOD:
2543       if (HONOR_SNANS (x))
2544 	return 1;
2545       if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2546 	return flag_trapping_math;
2547       if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2548 	return 1;
2549       break;
2550 
2551     case EXPR_LIST:
2552       /* An EXPR_LIST is used to represent a function call.  This
2553 	 certainly may trap.  */
2554       return 1;
2555 
2556     case GE:
2557     case GT:
2558     case LE:
2559     case LT:
2560     case LTGT:
2561     case COMPARE:
2562       /* Some floating point comparisons may trap.  */
2563       if (!flag_trapping_math)
2564 	break;
2565       /* ??? There is no machine independent way to check for tests that trap
2566 	 when COMPARE is used, though many targets do make this distinction.
2567 	 For instance, sparc uses CCFPE for compares which generate exceptions
2568 	 and CCFP for compares which do not generate exceptions.  */
2569       if (HONOR_NANS (x))
2570 	return 1;
2571       /* But often the compare has some CC mode, so check operand
2572 	 modes as well.  */
2573       if (HONOR_NANS (XEXP (x, 0))
2574 	  || HONOR_NANS (XEXP (x, 1)))
2575 	return 1;
2576       break;
2577 
2578     case EQ:
2579     case NE:
2580       if (HONOR_SNANS (x))
2581 	return 1;
2582       /* Often comparison is CC mode, so check operand modes.  */
2583       if (HONOR_SNANS (XEXP (x, 0))
2584 	  || HONOR_SNANS (XEXP (x, 1)))
2585 	return 1;
2586       break;
2587 
2588     case FIX:
2589       /* Conversion of floating point might trap.  */
2590       if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
2591 	return 1;
2592       break;
2593 
2594     case NEG:
2595     case ABS:
2596     case SUBREG:
2597       /* These operations don't trap even with floating point.  */
2598       break;
2599 
2600     default:
2601       /* Any floating arithmetic may trap.  */
2602       if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
2603 	return 1;
2604     }
2605 
2606   fmt = GET_RTX_FORMAT (code);
2607   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2608     {
2609       if (fmt[i] == 'e')
2610 	{
2611 	  if (may_trap_p_1 (XEXP (x, i), flags))
2612 	    return 1;
2613 	}
2614       else if (fmt[i] == 'E')
2615 	{
2616 	  int j;
2617 	  for (j = 0; j < XVECLEN (x, i); j++)
2618 	    if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2619 	      return 1;
2620 	}
2621     }
2622   return 0;
2623 }
2624 
2625 /* Return nonzero if evaluating rtx X might cause a trap.  */
2626 
2627 int
2628 may_trap_p (const_rtx x)
2629 {
2630   return may_trap_p_1 (x, 0);
2631 }
2632 
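/* Editorial examples (not part of the original source), assuming SImode
   integer division on a typical target: may_trap_p on
   (div:SI (reg:SI a) (reg:SI b)) is 1, since the divisor is not a
   constant and might be zero; on (div:SI (reg:SI a) (const_int 4)) it
   is 0; for a MEM the answer depends on MEM_NOTRAP_P and on whether
   the address can be shown not to trap.  */
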
2633 /* Same as above, but additionally return nonzero if evaluating rtx X might
2634    cause a fault.  We define a fault for the purpose of this function as an
2635    erroneous execution condition that cannot be encountered during the normal
2636    execution of a valid program; the typical example is an unaligned memory
2637    access on a strict alignment machine.  The compiler guarantees that it
2638    doesn't generate code that will fault from a valid program, but this
2639    guarantee doesn't mean anything for individual instructions.  Consider
2640    the following example:
2641 
2642       struct S { int d; union { char *cp; int *ip; }; };
2643 
2644       int foo(struct S *s)
2645       {
2646 	if (s->d == 1)
2647 	  return *s->ip;
2648 	else
2649 	  return *s->cp;
2650       }
2651 
2652    on a strict alignment machine.  In a valid program, foo will never be
2653    invoked on a structure for which d is equal to 1 and the underlying
2654    unique field of the union not aligned on a 4-byte boundary, but the
2655    expression *s->ip might cause a fault if considered individually.
2656 
2657    At the RTL level, potentially problematic expressions will almost always
2658    verify may_trap_p; for example, the above dereference can be emitted as
2659    (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2660    However, suppose that foo is inlined in a caller that causes s->cp to
2661    point to a local character variable and guarantees that s->d is not set
2662    to 1; foo may have been effectively translated into pseudo-RTL as:
2663 
2664       if ((reg:SI) == 1)
2665 	(set (reg:SI) (mem:SI (%fp - 7)))
2666       else
2667 	(set (reg:QI) (mem:QI (%fp - 7)))
2668 
2669    Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2670    memory reference to a stack slot, but it will certainly cause a fault
2671    on a strict alignment machine.  */
2672 
2673 int
2674 may_trap_or_fault_p (const_rtx x)
2675 {
2676   return may_trap_p_1 (x, 1);
2677 }
2678 
2679 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2680    i.e., an inequality.  */
2681 
2682 int
2683 inequality_comparisons_p (const_rtx x)
2684 {
2685   const char *fmt;
2686   int len, i;
2687   const enum rtx_code code = GET_CODE (x);
2688 
2689   switch (code)
2690     {
2691     case REG:
2692     case SCRATCH:
2693     case PC:
2694     case CC0:
2695     CASE_CONST_ANY:
2696     case CONST:
2697     case LABEL_REF:
2698     case SYMBOL_REF:
2699       return 0;
2700 
2701     case LT:
2702     case LTU:
2703     case GT:
2704     case GTU:
2705     case LE:
2706     case LEU:
2707     case GE:
2708     case GEU:
2709       return 1;
2710 
2711     default:
2712       break;
2713     }
2714 
2715   len = GET_RTX_LENGTH (code);
2716   fmt = GET_RTX_FORMAT (code);
2717 
2718   for (i = 0; i < len; i++)
2719     {
2720       if (fmt[i] == 'e')
2721 	{
2722 	  if (inequality_comparisons_p (XEXP (x, i)))
2723 	    return 1;
2724 	}
2725       else if (fmt[i] == 'E')
2726 	{
2727 	  int j;
2728 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2729 	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
2730 	      return 1;
2731 	}
2732     }
2733 
2734   return 0;
2735 }
2736 
2737 /* Replace any occurrence of FROM in X with TO.  The function does
2738    not enter into CONST_DOUBLE for the replace.
2739 
2740    Note that copying is not done so X must not be shared unless all copies
2741    are to be modified.  */
2742 
2743 rtx
2744 replace_rtx (rtx x, rtx from, rtx to)
2745 {
2746   int i, j;
2747   const char *fmt;
2748 
2749   if (x == from)
2750     return to;
2751 
2752   /* Allow this function to make replacements in EXPR_LISTs.  */
2753   if (x == 0)
2754     return 0;
2755 
2756   if (GET_CODE (x) == SUBREG)
2757     {
2758       rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2759 
2760       if (CONST_INT_P (new_rtx))
2761 	{
2762 	  x = simplify_subreg (GET_MODE (x), new_rtx,
2763 			       GET_MODE (SUBREG_REG (x)),
2764 			       SUBREG_BYTE (x));
2765 	  gcc_assert (x);
2766 	}
2767       else
2768 	SUBREG_REG (x) = new_rtx;
2769 
2770       return x;
2771     }
2772   else if (GET_CODE (x) == ZERO_EXTEND)
2773     {
2774       rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2775 
2776       if (CONST_INT_P (new_rtx))
2777 	{
2778 	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2779 					new_rtx, GET_MODE (XEXP (x, 0)));
2780 	  gcc_assert (x);
2781 	}
2782       else
2783 	XEXP (x, 0) = new_rtx;
2784 
2785       return x;
2786     }
2787 
2788   fmt = GET_RTX_FORMAT (GET_CODE (x));
2789   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2790     {
2791       if (fmt[i] == 'e')
2792 	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2793       else if (fmt[i] == 'E')
2794 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2795 	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2796     }
2797 
2798   return x;
2799 }
2800 
2801 /* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL.  Also track
2802    the change in LABEL_NUSES if UPDATE_LABEL_NUSES.  */
2803 
2804 void
2805 replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
2806 {
2807   /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long.  */
2808   rtx x = *loc;
2809   if (JUMP_TABLE_DATA_P (x))
2810     {
2811       x = PATTERN (x);
2812       rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
2813       int len = GET_NUM_ELEM (vec);
2814       for (int i = 0; i < len; ++i)
2815 	{
2816 	  rtx ref = RTVEC_ELT (vec, i);
2817 	  if (XEXP (ref, 0) == old_label)
2818 	    {
2819 	      XEXP (ref, 0) = new_label;
2820 	      if (update_label_nuses)
2821 		{
2822 		  ++LABEL_NUSES (new_label);
2823 		  --LABEL_NUSES (old_label);
2824 		}
2825 	    }
2826 	}
2827       return;
2828     }
2829 
2830   /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2831      field.  This is not handled by the iterator because it doesn't
2832      handle unprinted ('0') fields.  */
2833   if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
2834     JUMP_LABEL (x) = new_label;
2835 
2836   subrtx_ptr_iterator::array_type array;
2837   FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
2838     {
2839       rtx *loc = *iter;
2840       if (rtx x = *loc)
2841 	{
2842 	  if (GET_CODE (x) == SYMBOL_REF
2843 	      && CONSTANT_POOL_ADDRESS_P (x))
2844 	    {
2845 	      rtx c = get_pool_constant (x);
2846 	      if (rtx_referenced_p (old_label, c))
2847 		{
2848 		  /* Create a copy of constant C; replace the label inside
2849 		     but do not update LABEL_NUSES because uses in constant pool
2850 		     are not counted.  */
2851 		  rtx new_c = copy_rtx (c);
2852 		  replace_label (&new_c, old_label, new_label, false);
2853 
2854 		  /* Add the new constant NEW_C to constant pool and replace
2855 		     the old reference to constant by new reference.  */
2856 		  rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
2857 		  *loc = replace_rtx (x, x, XEXP (new_mem, 0));
2858 		}
2859 	    }
2860 
2861 	  if ((GET_CODE (x) == LABEL_REF
2862 	       || GET_CODE (x) == INSN_LIST)
2863 	      && XEXP (x, 0) == old_label)
2864 	    {
2865 	      XEXP (x, 0) = new_label;
2866 	      if (update_label_nuses)
2867 		{
2868 		  ++LABEL_NUSES (new_label);
2869 		  --LABEL_NUSES (old_label);
2870 		}
2871 	    }
2872 	}
2873     }
2874 }
2875 
2876 void
2877 replace_label_in_insn (rtx_insn *insn, rtx old_label, rtx new_label,
2878 		       bool update_label_nuses)
2879 {
2880   rtx insn_as_rtx = insn;
2881   replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
2882   gcc_checking_assert (insn_as_rtx == insn);
2883 }
2884 
2885 /* Return true if X is referenced in BODY.  */
2886 
2887 bool
2888 rtx_referenced_p (const_rtx x, const_rtx body)
2889 {
2890   subrtx_iterator::array_type array;
2891   FOR_EACH_SUBRTX (iter, array, body, ALL)
2892     if (const_rtx y = *iter)
2893       {
2894 	/* Check if a label_ref Y refers to label X.  */
2895 	if (GET_CODE (y) == LABEL_REF
2896 	    && LABEL_P (x)
2897 	    && LABEL_REF_LABEL (y) == x)
2898 	  return true;
2899 
2900 	if (rtx_equal_p (x, y))
2901 	  return true;
2902 
2903 	/* If Y is a reference to pool constant traverse the constant.  */
2904 	if (GET_CODE (y) == SYMBOL_REF
2905 	    && CONSTANT_POOL_ADDRESS_P (y))
2906 	  iter.substitute (get_pool_constant (y));
2907       }
2908   return false;
2909 }
2910 
2911 /* If INSN is a tablejump, return true, storing its label (which precedes
2912    the jump table) in *LABELP and the table in *TABLEP.  Both may be NULL.  */
2913 
2914 bool
2915 tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep)
2916 {
2917   rtx label, table;
2918 
2919   if (!JUMP_P (insn))
2920     return false;
2921 
2922   label = JUMP_LABEL (insn);
2923   if (label != NULL_RTX && !ANY_RETURN_P (label)
2924       && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX
2925       && JUMP_TABLE_DATA_P (table))
2926     {
2927       if (labelp)
2928 	*labelp = label;
2929       if (tablep)
2930 	*tablep = as_a <rtx_jump_table_data *> (table);
2931       return true;
2932     }
2933   return false;
2934 }
2935 
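/* Editorial usage sketch (not part of the original source): both output
   parameters are optional, so a caller only interested in the table can
   write

     rtx_jump_table_data *table;
     if (tablejump_p (insn, NULL, &table))
       ...

   where INSN is the jump insn being examined.  */
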
2936 /* A subroutine of computed_jump_p.  Return 1 if X contains a REG or MEM or
2937    a constant that is not in the constant pool and is not in the condition
2938    of an IF_THEN_ELSE.  */
2939 
2940 static int
2941 computed_jump_p_1 (const_rtx x)
2942 {
2943   const enum rtx_code code = GET_CODE (x);
2944   int i, j;
2945   const char *fmt;
2946 
2947   switch (code)
2948     {
2949     case LABEL_REF:
2950     case PC:
2951       return 0;
2952 
2953     case CONST:
2954     CASE_CONST_ANY:
2955     case SYMBOL_REF:
2956     case REG:
2957       return 1;
2958 
2959     case MEM:
2960       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2961 		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2962 
2963     case IF_THEN_ELSE:
2964       return (computed_jump_p_1 (XEXP (x, 1))
2965 	      || computed_jump_p_1 (XEXP (x, 2)));
2966 
2967     default:
2968       break;
2969     }
2970 
2971   fmt = GET_RTX_FORMAT (code);
2972   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2973     {
2974       if (fmt[i] == 'e'
2975 	  && computed_jump_p_1 (XEXP (x, i)))
2976 	return 1;
2977 
2978       else if (fmt[i] == 'E')
2979 	for (j = 0; j < XVECLEN (x, i); j++)
2980 	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
2981 	    return 1;
2982     }
2983 
2984   return 0;
2985 }
2986 
2987 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2988 
2989    Tablejumps and casesi insns are not considered indirect jumps;
2990    we can recognize them by a (use (label_ref)).  */
2991 
2992 int
2993 computed_jump_p (const_rtx insn)
2994 {
2995   int i;
2996   if (JUMP_P (insn))
2997     {
2998       rtx pat = PATTERN (insn);
2999 
3000       /* If we have a JUMP_LABEL set, we're not a computed jump.  */
3001       if (JUMP_LABEL (insn) != NULL)
3002 	return 0;
3003 
3004       if (GET_CODE (pat) == PARALLEL)
3005 	{
3006 	  int len = XVECLEN (pat, 0);
3007 	  int has_use_labelref = 0;
3008 
3009 	  for (i = len - 1; i >= 0; i--)
3010 	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3011 		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3012 		    == LABEL_REF))
3013 	      {
3014 	        has_use_labelref = 1;
3015 	        break;
3016 	      }
3017 
3018 	  if (! has_use_labelref)
3019 	    for (i = len - 1; i >= 0; i--)
3020 	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3021 		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
3022 		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
3023 		return 1;
3024 	}
3025       else if (GET_CODE (pat) == SET
3026 	       && SET_DEST (pat) == pc_rtx
3027 	       && computed_jump_p_1 (SET_SRC (pat)))
3028 	return 1;
3029     }
3030   return 0;
3031 }
3032 
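/* Editorial examples (not part of the original source): a jump whose
   pattern is (set (pc) (reg ...)) or (set (pc) (mem ...)) with a
   non-constant-pool address is a computed jump, assuming no JUMP_LABEL
   has been attached, while (set (pc) (label_ref ...)) and tablejump
   patterns are not.  */
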
3033 
3034 
3035 /* MEM has a PRE/POST-INC/DEC/MODIFY address X.  Extract the operands of
3036    the equivalent add insn and pass the result to FN, using DATA as the
3037    final argument.  */
3038 
3039 static int
3040 for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
3041 {
3042   rtx x = XEXP (mem, 0);
3043   switch (GET_CODE (x))
3044     {
3045     case PRE_INC:
3046     case POST_INC:
3047       {
3048 	int size = GET_MODE_SIZE (GET_MODE (mem));
3049 	rtx r1 = XEXP (x, 0);
3050 	rtx c = gen_int_mode (size, GET_MODE (r1));
3051 	return fn (mem, x, r1, r1, c, data);
3052       }
3053 
3054     case PRE_DEC:
3055     case POST_DEC:
3056       {
3057 	int size = GET_MODE_SIZE (GET_MODE (mem));
3058 	rtx r1 = XEXP (x, 0);
3059 	rtx c = gen_int_mode (-size, GET_MODE (r1));
3060 	return fn (mem, x, r1, r1, c, data);
3061       }
3062 
3063     case PRE_MODIFY:
3064     case POST_MODIFY:
3065       {
3066 	rtx r1 = XEXP (x, 0);
3067 	rtx add = XEXP (x, 1);
3068 	return fn (mem, x, r1, add, NULL, data);
3069       }
3070 
3071     default:
3072       gcc_unreachable ();
3073     }
3074 }
3075 
3076 /* Traverse *LOC looking for MEMs that have autoinc addresses.
3077    For each such autoinc operation found, call FN, passing it
3078    the innermost enclosing MEM, the operation itself, the RTX modified
3079    by the operation, two RTXs (the second may be NULL) that, once
3080    added, represent the value to be held by the modified RTX
3081    afterwards, and DATA.  FN is to return 0 to continue the
3082    traversal or any other value to have it returned to the caller of
3083    for_each_inc_dec.  */
3084 
3085 int
3086 for_each_inc_dec (rtx x,
3087 		  for_each_inc_dec_fn fn,
3088 		  void *data)
3089 {
3090   subrtx_var_iterator::array_type array;
3091   FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
3092     {
3093       rtx mem = *iter;
3094       if (mem
3095 	  && MEM_P (mem)
3096 	  && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
3097 	{
3098 	  int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
3099 	  if (res != 0)
3100 	    return res;
3101 	  iter.skip_subrtxes ();
3102 	}
3103     }
3104   return 0;
3105 }
3106 
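/* Editorial callback sketch (not part of the original source): a
   function matching for_each_inc_dec_fn takes the six arguments passed
   by for_each_inc_dec_find_inc_dec above and returns 0 to keep
   scanning.  A hypothetical counter of autoinc addresses could be:

     static int
     count_autoinc (rtx, rtx, rtx, rtx, rtx, void *data)
     {
       int *count = (int *) data;
       ++*count;
       return 0;
     }

     int n = 0;
     for_each_inc_dec (PATTERN (insn), count_autoinc, &n);

   "count_autoinc" is only an illustrative name.  */
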
3107 
3108 /* Searches X for any reference to REGNO, returning the rtx of the
3109    reference found if any.  Otherwise, returns NULL_RTX.  */
3110 
3111 rtx
3112 regno_use_in (unsigned int regno, rtx x)
3113 {
3114   const char *fmt;
3115   int i, j;
3116   rtx tem;
3117 
3118   if (REG_P (x) && REGNO (x) == regno)
3119     return x;
3120 
3121   fmt = GET_RTX_FORMAT (GET_CODE (x));
3122   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3123     {
3124       if (fmt[i] == 'e')
3125 	{
3126 	  if ((tem = regno_use_in (regno, XEXP (x, i))))
3127 	    return tem;
3128 	}
3129       else if (fmt[i] == 'E')
3130 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3131 	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3132 	    return tem;
3133     }
3134 
3135   return NULL_RTX;
3136 }
3137 
3138 /* Return a value indicating whether OP, an operand of a commutative
3139    operation, is preferred as the first or second operand.  The higher
3140    the value, the stronger the preference for being the first operand.
3141    Negative values indicate a preference for being the second operand
3142    (constants and other objects), positive values for being the first.  */
3143 
3144 int
3145 commutative_operand_precedence (rtx op)
3146 {
3147   enum rtx_code code = GET_CODE (op);
3148 
3149   /* Constants always come second.  Prefer "nice" constants.  */
3150   if (code == CONST_INT)
3151     return -8;
3152   if (code == CONST_WIDE_INT)
3153     return -8;
3154   if (code == CONST_DOUBLE)
3155     return -7;
3156   if (code == CONST_FIXED)
3157     return -7;
3158   op = avoid_constant_pool_reference (op);
3159   code = GET_CODE (op);
3160 
3161   switch (GET_RTX_CLASS (code))
3162     {
3163     case RTX_CONST_OBJ:
3164       if (code == CONST_INT)
3165         return -6;
3166       if (code == CONST_WIDE_INT)
3167         return -6;
3168       if (code == CONST_DOUBLE)
3169         return -5;
3170       if (code == CONST_FIXED)
3171         return -5;
3172       return -4;
3173 
3174     case RTX_EXTRA:
3175       /* SUBREGs of objects should come second.  */
3176       if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3177         return -3;
3178       return 0;
3179 
3180     case RTX_OBJ:
3181       /* Complex expressions should be the first, so decrease priority
3182          of objects.  Prefer pointer objects over non pointer objects.  */
3183       if ((REG_P (op) && REG_POINTER (op))
3184 	  || (MEM_P (op) && MEM_POINTER (op)))
3185 	return -1;
3186       return -2;
3187 
3188     case RTX_COMM_ARITH:
3189       /* Prefer operands that are themselves commutative to be first.
3190          This helps to make things linear.  In particular,
3191          (and (and (reg) (reg)) (not (reg))) is canonical.  */
3192       return 4;
3193 
3194     case RTX_BIN_ARITH:
3195       /* If only one operand is a binary expression, it will be the first
3196          operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
3197          is canonical, although it will usually be further simplified.  */
3198       return 2;
3199 
3200     case RTX_UNARY:
3201       /* Then prefer NEG and NOT.  */
3202       if (code == NEG || code == NOT)
3203         return 1;
3204 
3205     default:
3206       return 0;
3207     }
3208 }
3209 
3210 /* Return 1 iff it is necessary to swap operands of commutative operation
3211    in order to canonicalize expression.  */
3212 
3213 bool
3214 swap_commutative_operands_p (rtx x, rtx y)
3215 {
3216   return (commutative_operand_precedence (x)
3217 	  < commutative_operand_precedence (y));
3218 }
3219 
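/* Editorial example (not part of the original source): with
   X = (const_int 4) and Y = (reg:SI r), the precedences are roughly -8
   and -2, so swap_commutative_operands_p returns true and
   canonicalization of (plus ...) puts the register first; with the
   operands already in that order it returns false.  */
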
3220 /* Return 1 if X is an autoincrement side effect and the register is
3221    not the stack pointer.  */
3222 int
3223 auto_inc_p (const_rtx x)
3224 {
3225   switch (GET_CODE (x))
3226     {
3227     case PRE_INC:
3228     case POST_INC:
3229     case PRE_DEC:
3230     case POST_DEC:
3231     case PRE_MODIFY:
3232     case POST_MODIFY:
3233       /* There are no REG_INC notes for SP.  */
3234       if (XEXP (x, 0) != stack_pointer_rtx)
3235 	return 1;
3236     default:
3237       break;
3238     }
3239   return 0;
3240 }
3241 
3242 /* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
3243 int
3244 loc_mentioned_in_p (rtx *loc, const_rtx in)
3245 {
3246   enum rtx_code code;
3247   const char *fmt;
3248   int i, j;
3249 
3250   if (!in)
3251     return 0;
3252 
3253   code = GET_CODE (in);
3254   fmt = GET_RTX_FORMAT (code);
3255   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3256     {
3257       if (fmt[i] == 'e')
3258 	{
3259 	  if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3260 	    return 1;
3261 	}
3262       else if (fmt[i] == 'E')
3263 	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3264 	  if (loc == &XVECEXP (in, i, j)
3265 	      || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3266 	    return 1;
3267     }
3268   return 0;
3269 }
3270 
3271 /* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
3272    and SUBREG_BYTE, return the bit offset where the subreg begins
3273    (counting from the least significant bit of the operand).  */
3274 
3275 unsigned int
3276 subreg_lsb_1 (machine_mode outer_mode,
3277 	      machine_mode inner_mode,
3278 	      unsigned int subreg_byte)
3279 {
3280   unsigned int bitpos;
3281   unsigned int byte;
3282   unsigned int word;
3283 
3284   /* A paradoxical subreg begins at bit position 0.  */
3285   if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
3286     return 0;
3287 
3288   if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3289     /* If the subreg crosses a word boundary ensure that
3290        it also begins and ends on a word boundary.  */
3291     gcc_assert (!((subreg_byte % UNITS_PER_WORD
3292 		  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3293 		  && (subreg_byte % UNITS_PER_WORD
3294 		      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3295 
3296   if (WORDS_BIG_ENDIAN)
3297     word = (GET_MODE_SIZE (inner_mode)
3298 	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3299   else
3300     word = subreg_byte / UNITS_PER_WORD;
3301   bitpos = word * BITS_PER_WORD;
3302 
3303   if (BYTES_BIG_ENDIAN)
3304     byte = (GET_MODE_SIZE (inner_mode)
3305 	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3306   else
3307     byte = subreg_byte % UNITS_PER_WORD;
3308   bitpos += byte * BITS_PER_UNIT;
3309 
3310   return bitpos;
3311 }
3312 
3313 /* Given a subreg X, return the bit offset where the subreg begins
3314    (counting from the least significant bit of the reg).  */
3315 
3316 unsigned int
3317 subreg_lsb (const_rtx x)
3318 {
3319   return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3320 		       SUBREG_BYTE (x));
3321 }
3322 
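/* Editorial worked example (not part of the original source), assuming
   4-byte words and 8-bit units: for (subreg:QI (reg:SI r) 3) on a
   little-endian target subreg_lsb returns 24 (the most significant
   byte), whereas on a fully big-endian target byte 3 is the least
   significant byte and the result is 0.  */
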
3323 /* Fill in information about a subreg of a hard register.
3324    xregno - A regno of an inner hard subreg_reg (or what will become one).
3325    xmode  - The mode of xregno.
3326    offset - The byte offset.
3327    ymode  - The mode of a top level SUBREG (or what may become one).
3328    info   - Pointer to structure to fill in.
3329 
3330    Rather than considering one particular inner register (and thus one
3331    particular "outer" register) in isolation, this function really uses
3332    XREGNO as a model for a sequence of isomorphic hard registers.  Thus the
3333    function does not check whether adding INFO->offset to XREGNO gives
3334    a valid hard register; even if INFO->offset + XREGNO is out of range,
3335    there might be another register of the same type that is in range.
3336    Likewise it doesn't check whether HARD_REGNO_MODE_OK accepts the new
3337    register, since that can depend on things like whether the final
3338    register number is even or odd.  Callers that want to check whether
3339    this particular subreg can be replaced by a simple (reg ...) should
3340    use simplify_subreg_regno.  */
3341 
3342 void
3343 subreg_get_info (unsigned int xregno, machine_mode xmode,
3344 		 unsigned int offset, machine_mode ymode,
3345 		 struct subreg_info *info)
3346 {
3347   int nregs_xmode, nregs_ymode;
3348   int mode_multiple, nregs_multiple;
3349   int offset_adj, y_offset, y_offset_adj;
3350   int regsize_xmode, regsize_ymode;
3351   bool rknown;
3352 
3353   gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3354 
3355   rknown = false;
3356 
3357   /* If there are holes in a non-scalar mode in registers, we expect
3358      that it is made up of its units concatenated together.  */
3359   if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3360     {
3361       machine_mode xmode_unit;
3362 
3363       nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3364       if (GET_MODE_INNER (xmode) == VOIDmode)
3365 	xmode_unit = xmode;
3366       else
3367 	xmode_unit = GET_MODE_INNER (xmode);
3368       gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3369       gcc_assert (nregs_xmode
3370 		  == (GET_MODE_NUNITS (xmode)
3371 		      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3372       gcc_assert (hard_regno_nregs[xregno][xmode]
3373 		  == (hard_regno_nregs[xregno][xmode_unit]
3374 		      * GET_MODE_NUNITS (xmode)));
3375 
3376       /* You can only ask for a SUBREG of a value with holes in the middle
3377 	 if you don't cross the holes.  (Such a SUBREG should be done by
3378 	 picking a different register class, or doing it in memory if
3379 	 necessary.)  An example of a value with holes is XCmode on 32-bit
3380 	 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3381 	 3 for each part, but in memory it's two 128-bit parts.
3382 	 Padding is assumed to be at the end (not necessarily the 'high part')
3383 	 of each unit.  */
3384       if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3385 	   < GET_MODE_NUNITS (xmode))
3386 	  && (offset / GET_MODE_SIZE (xmode_unit)
3387 	      != ((offset + GET_MODE_SIZE (ymode) - 1)
3388 		  / GET_MODE_SIZE (xmode_unit))))
3389 	{
3390 	  info->representable_p = false;
3391 	  rknown = true;
3392 	}
3393     }
3394   else
3395     nregs_xmode = hard_regno_nregs[xregno][xmode];
3396 
3397   nregs_ymode = hard_regno_nregs[xregno][ymode];
3398 
3399   /* Paradoxical subregs are otherwise valid.  */
3400   if (!rknown
3401       && offset == 0
3402       && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
3403     {
3404       info->representable_p = true;
3405       /* If this is a big endian paradoxical subreg, which uses more
3406 	 actual hard registers than the original register, we must
3407 	 return a negative offset so that we find the proper highpart
3408 	 of the register.  */
3409       if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3410 	  ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3411 	info->offset = nregs_xmode - nregs_ymode;
3412       else
3413 	info->offset = 0;
3414       info->nregs = nregs_ymode;
3415       return;
3416     }
3417 
3418   /* If registers store different numbers of bits in the different
3419      modes, we cannot generally form this subreg.  */
3420   if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3421       && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3422       && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3423       && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3424     {
3425       regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3426       regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3427       if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3428 	{
3429 	  info->representable_p = false;
3430 	  info->nregs
3431 	    = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3432 	  info->offset = offset / regsize_xmode;
3433 	  return;
3434 	}
3435       if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3436 	{
3437 	  info->representable_p = false;
3438 	  info->nregs
3439 	    = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3440 	  info->offset = offset / regsize_xmode;
3441 	  return;
3442 	}
3443       /* Quick exit for the simple and common case of extracting whole
3444 	 subregisters from a multiregister value.  */
3445       /* ??? It would be better to integrate this into the code below,
3446 	 if we can generalize the concept enough and figure out how
3447 	 odd-sized modes can coexist with the other weird cases we support.  */
3448       if (!rknown
3449 	  && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
3450 	  && regsize_xmode == regsize_ymode
3451 	  && (offset % regsize_ymode) == 0)
3452 	{
3453 	  info->representable_p = true;
3454 	  info->nregs = nregs_ymode;
3455 	  info->offset = offset / regsize_ymode;
3456 	  gcc_assert (info->offset + info->nregs <= nregs_xmode);
3457 	  return;
3458 	}
3459     }
3460 
3461   /* Lowpart subregs are otherwise valid.  */
3462   if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3463     {
3464       info->representable_p = true;
3465       rknown = true;
3466 
3467       if (offset == 0 || nregs_xmode == nregs_ymode)
3468 	{
3469 	  info->offset = 0;
3470 	  info->nregs = nregs_ymode;
3471 	  return;
3472 	}
3473     }
3474 
3475   /* This should always pass, otherwise we don't know how to verify
3476      the constraint.  These conditions may be relaxed but
3477      subreg_regno_offset would need to be redesigned.  */
3478   gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3479   gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3480 
3481   if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3482       && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3483     {
3484       HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3485       HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3486       HOST_WIDE_INT off_low = offset & (ysize - 1);
3487       HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3488       offset = (xsize - ysize - off_high) | off_low;
3489     }
3490   /* The XMODE value can be seen as a vector of NREGS_XMODE
3491      values.  The subreg must represent a lowpart of given field.
3492      Compute what field it is.  */
3493   offset_adj = offset;
3494   offset_adj -= subreg_lowpart_offset (ymode,
3495 				       mode_for_size (GET_MODE_BITSIZE (xmode)
3496 						      / nregs_xmode,
3497 						      MODE_INT, 0));
3498 
3499   /* Size of ymode must not be greater than the size of xmode.  */
3500   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3501   gcc_assert (mode_multiple != 0);
3502 
3503   y_offset = offset / GET_MODE_SIZE (ymode);
3504   y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3505   nregs_multiple = nregs_xmode / nregs_ymode;
3506 
3507   gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3508   gcc_assert ((mode_multiple % nregs_multiple) == 0);
3509 
3510   if (!rknown)
3511     {
3512       info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3513       rknown = true;
3514     }
3515   info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3516   info->nregs = nregs_ymode;
3517 }
3518 
3519 /* This function returns the regno offset of a subreg expression.
3520    xregno - A regno of an inner hard subreg_reg (or what will become one).
3521    xmode  - The mode of xregno.
3522    offset - The byte offset.
3523    ymode  - The mode of a top level SUBREG (or what may become one).
3524    RETURN - The regno offset which would be used.  */
3525 unsigned int
3526 subreg_regno_offset (unsigned int xregno, machine_mode xmode,
3527 		     unsigned int offset, machine_mode ymode)
3528 {
3529   struct subreg_info info;
3530   subreg_get_info (xregno, xmode, offset, ymode, &info);
3531   return info.offset;
3532 }
3533 
3534 /* This function returns true when the offset is representable via
3535    subreg_offset in the given regno.
3536    xregno - A regno of an inner hard subreg_reg (or what will become one).
3537    xmode  - The mode of xregno.
3538    offset - The byte offset.
3539    ymode  - The mode of a top level SUBREG (or what may become one).
3540    RETURN - Whether the offset is representable.  */
3541 bool
3542 subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
3543 			       unsigned int offset, machine_mode ymode)
3544 {
3545   struct subreg_info info;
3546   subreg_get_info (xregno, xmode, offset, ymode, &info);
3547   return info.representable_p;
3548 }
3549 
3550 /* Return the number of a YMODE register to which
3551 
3552        (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3553 
3554    can be simplified.  Return -1 if the subreg can't be simplified.
3555 
3556    XREGNO is a hard register number.  */
3557 
3558 int
3559 simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
3560 		       unsigned int offset, machine_mode ymode)
3561 {
3562   struct subreg_info info;
3563   unsigned int yregno;
3564 
3565 #ifdef CANNOT_CHANGE_MODE_CLASS
3566   /* Give the backend a chance to disallow the mode change.  */
3567   if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3568       && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3569       && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode)
3570       /* We can use mode change in LRA for some transformations.  */
3571       && ! lra_in_progress)
3572     return -1;
3573 #endif
3574 
3575   /* We shouldn't simplify stack-related registers.  */
3576   if ((!reload_completed || frame_pointer_needed)
3577       && xregno == FRAME_POINTER_REGNUM)
3578     return -1;
3579 
3580   if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3581       && xregno == ARG_POINTER_REGNUM)
3582     return -1;
3583 
3584   if (xregno == STACK_POINTER_REGNUM
3585       /* We should convert hard stack register in LRA if it is
3586 	 possible.  */
3587       && ! lra_in_progress)
3588     return -1;
3589 
3590   /* Try to get the register offset.  */
3591   subreg_get_info (xregno, xmode, offset, ymode, &info);
3592   if (!info.representable_p)
3593     return -1;
3594 
3595   /* Make sure that the offsetted register value is in range.  */
3596   yregno = xregno + info.offset;
3597   if (!HARD_REGISTER_NUM_P (yregno))
3598     return -1;
3599 
3600   /* See whether (reg:YMODE YREGNO) is valid.
3601 
3602      ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3603      This is a kludge to work around how complex FP arguments are passed
3604      on IA-64 and should be fixed.  See PR target/49226.  */
3605   if (!HARD_REGNO_MODE_OK (yregno, ymode)
3606       && HARD_REGNO_MODE_OK (xregno, xmode))
3607     return -1;
3608 
3609   return (int) yregno;
3610 }
3611 
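/* Editorial usage sketch (not part of the original source): callers
   check for the -1 failure value before using the result as a hard
   register number:

     int new_regno = simplify_subreg_regno (REGNO (SUBREG_REG (x)),
                                            GET_MODE (SUBREG_REG (x)),
                                            SUBREG_BYTE (x), GET_MODE (x));
     if (new_regno >= 0)
       ...

   where X is a SUBREG of a hard register.  */
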
3612 /* Return the final regno that a subreg expression refers to.  */
3613 unsigned int
3614 subreg_regno (const_rtx x)
3615 {
3616   unsigned int ret;
3617   rtx subreg = SUBREG_REG (x);
3618   int regno = REGNO (subreg);
3619 
3620   ret = regno + subreg_regno_offset (regno,
3621 				     GET_MODE (subreg),
3622 				     SUBREG_BYTE (x),
3623 				     GET_MODE (x));
3624   return ret;
3625 
3626 }
3627 
3628 /* Return the number of registers that a subreg expression refers
3629    to.  */
3630 unsigned int
3631 subreg_nregs (const_rtx x)
3632 {
3633   return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3634 }
3635 
3636 /* Return the number of registers that a subreg expression X, whose
3637    inner register has number REGNO, refers to.  This is a copy of
3638    subreg_nregs above, changed so that the regno can be passed in.  */
3639 
3640 unsigned int
3641 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3642 {
3643   struct subreg_info info;
3644   rtx subreg = SUBREG_REG (x);
3645 
3646   subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3647 		   &info);
3648   return info.nregs;
3649 }
3650 
3651 
3652 struct parms_set_data
3653 {
3654   int nregs;
3655   HARD_REG_SET regs;
3656 };
3657 
3658 /* Helper function for noticing stores to parameter registers.  */
3659 static void
3660 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3661 {
3662   struct parms_set_data *const d = (struct parms_set_data *) data;
3663   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3664       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3665     {
3666       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3667       d->nregs--;
3668     }
3669 }
3670 
3671 /* Look backward for the first parameter to be loaded.
3672    Note that loads of all parameters will not necessarily be
3673    found if CSE has eliminated some of them (e.g., an argument
3674    to the outer function is passed down as a parameter).
3675    Do not skip BOUNDARY.  */
3676 rtx_insn *
3677 find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
3678 {
3679   struct parms_set_data parm;
3680   rtx p;
3681   rtx_insn *before, *first_set;
3682 
3683   /* Since different machines initialize their parameter registers
3684      in different orders, assume nothing.  Collect the set of all
3685      parameter registers.  */
3686   CLEAR_HARD_REG_SET (parm.regs);
3687   parm.nregs = 0;
3688   for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3689     if (GET_CODE (XEXP (p, 0)) == USE
3690 	&& REG_P (XEXP (XEXP (p, 0), 0)))
3691       {
3692 	gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3693 
3694 	/* We only care about registers which can hold function
3695 	   arguments.  */
3696 	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3697 	  continue;
3698 
3699 	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3700 	parm.nregs++;
3701       }
3702   before = call_insn;
3703   first_set = call_insn;
3704 
3705   /* Search backward for the first set of a register in this set.  */
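  /* For example, if the call uses (reg 0) and (reg 1) to pass arguments,
     the loop below walks backward over the insns that store into those
     registers and returns the earliest such insn, stopping early at
     another call, at BOUNDARY, or at an insn that sets none of the
     parameter registers still being looked for.  */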
3706   while (parm.nregs && before != boundary)
3707     {
3708       before = PREV_INSN (before);
3709 
3710       /* It is possible that some loads got CSEed from one call to
3711          another.  Stop in that case.  */
3712       if (CALL_P (before))
3713 	break;
3714 
3715       /* Our caller must either ensure that we will find all sets
3716          (in case the code has not been optimized yet), or guard against
3717          intervening labels by setting BOUNDARY to the preceding
3718          CODE_LABEL.  */
3719       if (LABEL_P (before))
3720 	{
3721 	  gcc_assert (before == boundary);
3722 	  break;
3723 	}
3724 
3725       if (INSN_P (before))
3726 	{
3727 	  int nregs_old = parm.nregs;
3728 	  note_stores (PATTERN (before), parms_set, &parm);
3729 	  /* If we found something that did not set a parameter reg,
3730 	     we're done.  Do not keep going, as that might result
3731 	     in hoisting an insn before the setting of a pseudo
3732 	     that is used by the hoisted insn. */
3733 	  if (nregs_old != parm.nregs)
3734 	    first_set = before;
3735 	  else
3736 	    break;
3737 	}
3738     }
3739   return first_set;
3740 }
3741 
3742 /* Return true if we should avoid inserting code between INSN and preceding
3743    call instruction.  */
3744 
3745 bool
3746 keep_with_call_p (const rtx_insn *insn)
3747 {
3748   rtx set;
3749 
3750   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3751     {
3752       if (REG_P (SET_DEST (set))
3753 	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3754 	  && fixed_regs[REGNO (SET_DEST (set))]
3755 	  && general_operand (SET_SRC (set), VOIDmode))
3756 	return true;
3757       if (REG_P (SET_SRC (set))
3758 	  && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
3759 	  && REG_P (SET_DEST (set))
3760 	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3761 	return true;
3762       /* There may be a stack pop just after the call and before the store
3763 	 of the return register.  Search for the actual store when deciding
3764 	 if we can break or not.  */
3765       if (SET_DEST (set) == stack_pointer_rtx)
3766 	{
3767 	  /* This CONST_CAST is okay because next_nonnote_insn just
3768 	     returns its argument and we assign it to a const_rtx
3769 	     variable.  */
3770 	  const rtx_insn *i2
3771 	    = next_nonnote_insn (const_cast<rtx_insn *> (insn));
3772 	  if (i2 && keep_with_call_p (i2))
3773 	    return true;
3774 	}
3775     }
3776   return false;
3777 }
3778 
3779 /* Return true if LABEL is a target of JUMP_INSN.  This applies only
3780    to non-complex jumps.  That is, direct unconditional, conditional,
3781    and tablejumps, but not computed jumps or returns.  It also does
3782    not apply to the fallthru case of a conditional jump.  */
3783 
3784 bool
3785 label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
3786 {
3787   rtx tmp = JUMP_LABEL (jump_insn);
3788   rtx_jump_table_data *table;
3789 
3790   if (label == tmp)
3791     return true;
3792 
3793   if (tablejump_p (jump_insn, NULL, &table))
3794     {
3795       rtvec vec = table->get_labels ();
3796       int i, veclen = GET_NUM_ELEM (vec);
3797 
3798       for (i = 0; i < veclen; ++i)
3799 	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3800 	  return true;
3801     }
3802 
3803   if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3804     return true;
3805 
3806   return false;
3807 }
3808 
3809 
3810 /* Return an estimate of the cost of computing rtx X.
3811    One use is in cse, to decide which expression to keep in the hash table.
3812    Another is in rtl generation, to pick the cheapest way to multiply.
3813    Other uses like the latter are expected in the future.
3814 
3815    X appears as operand OPNO in an expression with code OUTER_CODE.
3816    SPEED specifies whether costs optimized for speed or size should
3817    be returned.  */
3818 
3819 int
3820 rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
3821 {
3822   int i, j;
3823   enum rtx_code code;
3824   const char *fmt;
3825   int total;
3826   int factor;
3827 
3828   if (x == 0)
3829     return 0;
3830 
3831   /* A size N times larger than UNITS_PER_WORD likely needs N times as
3832      many insns, taking N times as long.  */
3833   factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD;
3834   if (factor == 0)
3835     factor = 1;
3836 
3837   /* Compute the default costs of certain things.
3838      Note that targetm.rtx_costs can override the defaults.  */
3839 
3840   code = GET_CODE (x);
3841   switch (code)
3842     {
3843     case MULT:
3844       /* Multiplication has time-complexity O(N*N), where N is the
3845 	 number of units (translated from digits) when using
3846 	 schoolbook long multiplication.  */
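      /* For example, with a mode twice the size of UNITS_PER_WORD (such as
	 DImode on a 32-bit target), FACTOR is 2 and the default below is
	 2 * 2 * COSTS_N_INSNS (5) == COSTS_N_INSNS (20).  */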
3847       total = factor * factor * COSTS_N_INSNS (5);
3848       break;
3849     case DIV:
3850     case UDIV:
3851     case MOD:
3852     case UMOD:
3853       /* Similarly, complexity for schoolbook long division.  */
3854       total = factor * factor * COSTS_N_INSNS (7);
3855       break;
3856     case USE:
3857       /* Used in combine.c as a marker.  */
3858       total = 0;
3859       break;
3860     case SET:
3861       /* A SET doesn't have a mode, so let's look at the SET_DEST to get
3862 	 the mode for the factor.  */
3863       factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD;
3864       if (factor == 0)
3865 	factor = 1;
3866       /* Fall through.  */
3867     default:
3868       total = factor * COSTS_N_INSNS (1);
3869     }
3870 
3871   switch (code)
3872     {
3873     case REG:
3874       return 0;
3875 
3876     case SUBREG:
3877       total = 0;
3878       /* If we can't tie these modes, make this expensive.  The larger
3879 	 the mode, the more expensive it is.  */
3880       if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3881 	return COSTS_N_INSNS (2 + factor);
3882       break;
3883 
3884     default:
3885       if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
3886 	return total;
3887       break;
3888     }
3889 
3890   /* Sum the costs of the sub-rtx's, plus cost of this operation,
3891      which is already in total.  */
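  /* For example, for (plus:SI (mult:SI (reg) (reg)) (reg)) on a 32-bit
     target, and assuming the target hook declines to override either cost,
     the PLUS contributes COSTS_N_INSNS (1), the recursive walk adds
     COSTS_N_INSNS (5) for the MULT and 0 for each REG, giving
     COSTS_N_INSNS (6) in total.  */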
3892 
3893   fmt = GET_RTX_FORMAT (code);
3894   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3895     if (fmt[i] == 'e')
3896       total += rtx_cost (XEXP (x, i), code, i, speed);
3897     else if (fmt[i] == 'E')
3898       for (j = 0; j < XVECLEN (x, i); j++)
3899 	total += rtx_cost (XVECEXP (x, i, j), code, i, speed);
3900 
3901   return total;
3902 }
3903 
3904 /* Fill in the structure C with information about both speed and size rtx
3905    costs for X, which is operand OPNO in an expression with code OUTER.  */
3906 
3907 void
3908 get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
3909 		   struct full_rtx_costs *c)
3910 {
3911   c->speed = rtx_cost (x, outer, opno, true);
3912   c->size = rtx_cost (x, outer, opno, false);
3913 }
3914 
3915 
3916 /* Return the cost of address expression X.
3917    X is expected to be a properly formed address reference.
3918 
3919    The SPEED parameter specifies whether costs optimized for speed or size
3920    should be returned.  */
3921 
3922 int
3923 address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
3924 {
3925   /* We may be asked for the cost of various unusual addresses, such as the
3926      operands of a push instruction.  It is not worthwhile to complicate the
3927      target hook with such cases.  */
3928 
3929   if (!memory_address_addr_space_p (mode, x, as))
3930     return 1000;
3931 
3932   return targetm.address_cost (x, mode, as, speed);
3933 }
3934 
3935 /* If the target doesn't override, compute the cost as with arithmetic.  */
3936 
3937 int
3938 default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
3939 {
3940   return rtx_cost (x, MEM, 0, speed);
3941 }
3942 
3943 
3944 unsigned HOST_WIDE_INT
3945 nonzero_bits (const_rtx x, machine_mode mode)
3946 {
3947   return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3948 }
3949 
3950 unsigned int
3951 num_sign_bit_copies (const_rtx x, machine_mode mode)
3952 {
3953   return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3954 }
3955 
3956 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3957    It avoids exponential behavior in nonzero_bits1 when X has
3958    identical subexpressions on the first or the second level.  */
3959 
3960 static unsigned HOST_WIDE_INT
3961 cached_nonzero_bits (const_rtx x, machine_mode mode, const_rtx known_x,
3962 		     machine_mode known_mode,
3963 		     unsigned HOST_WIDE_INT known_ret)
3964 {
3965   if (x == known_x && mode == known_mode)
3966     return known_ret;
3967 
3968   /* Try to find identical subexpressions.  If found call
3969      nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3970      precomputed value for the subexpression as KNOWN_RET.  */
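  /* For example, in (plus:SI R R) the two operands are the same rtx, so
     R's nonzero bits are computed once here and passed down as
     KNOWN_X/KNOWN_RET instead of being recomputed for the second operand.  */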
3971 
3972   if (ARITHMETIC_P (x))
3973     {
3974       rtx x0 = XEXP (x, 0);
3975       rtx x1 = XEXP (x, 1);
3976 
3977       /* Check the first level.  */
3978       if (x0 == x1)
3979 	return nonzero_bits1 (x, mode, x0, mode,
3980 			      cached_nonzero_bits (x0, mode, known_x,
3981 						   known_mode, known_ret));
3982 
3983       /* Check the second level.  */
3984       if (ARITHMETIC_P (x0)
3985 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3986 	return nonzero_bits1 (x, mode, x1, mode,
3987 			      cached_nonzero_bits (x1, mode, known_x,
3988 						   known_mode, known_ret));
3989 
3990       if (ARITHMETIC_P (x1)
3991 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3992 	return nonzero_bits1 (x, mode, x0, mode,
3993 			      cached_nonzero_bits (x0, mode, known_x,
3994 						   known_mode, known_ret));
3995     }
3996 
3997   return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3998 }
3999 
4000 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
4001    We don't let nonzero_bits recur into num_sign_bit_copies, because that
4002    is less useful.  We can't allow both, because that results in exponential
4003    run time recursion.  There is a nullstone testcase that triggered
4004    this.  This macro avoids accidental uses of num_sign_bit_copies.  */
4005 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
4006 
4007 /* Given an expression, X, compute which bits in X can be nonzero.
4008    We don't care about bits outside of those defined in MODE.
4009 
4010    For most X this is simply GET_MODE_MASK (MODE), but if X is
4011    an arithmetic operation, we can do better.  */
4012 
4013 static unsigned HOST_WIDE_INT
4014 nonzero_bits1 (const_rtx x, machine_mode mode, const_rtx known_x,
4015 	       machine_mode known_mode,
4016 	       unsigned HOST_WIDE_INT known_ret)
4017 {
4018   unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
4019   unsigned HOST_WIDE_INT inner_nz;
4020   enum rtx_code code;
4021   machine_mode inner_mode;
4022   unsigned int mode_width = GET_MODE_PRECISION (mode);
4023 
4024   /* For floating-point and vector values, assume all bits are needed.  */
4025   if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
4026       || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4027     return nonzero;
4028 
4029   /* If X is wider than MODE, use its mode instead.  */
4030   if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
4031     {
4032       mode = GET_MODE (x);
4033       nonzero = GET_MODE_MASK (mode);
4034       mode_width = GET_MODE_PRECISION (mode);
4035     }
4036 
4037   if (mode_width > HOST_BITS_PER_WIDE_INT)
4038     /* Our only callers in this case look for single bit values.  So
4039        just return the mode mask.  Those tests will then be false.  */
4040     return nonzero;
4041 
4042 #ifndef WORD_REGISTER_OPERATIONS
4043   /* If MODE is wider than X, but both are a single word for both the host
4044      and target machines, we can compute this from which bits of the
4045      object might be nonzero in its own mode, taking into account the fact
4046      that on many CISC machines, accessing an object in a wider mode
4047      causes the high-order bits to become undefined.  So they are
4048      not known to be zero.  */
4049 
4050   if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
4051       && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
4052       && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
4053       && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
4054     {
4055       nonzero &= cached_nonzero_bits (x, GET_MODE (x),
4056 				      known_x, known_mode, known_ret);
4057       nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
4058       return nonzero;
4059     }
4060 #endif
4061 
4062   code = GET_CODE (x);
4063   switch (code)
4064     {
4065     case REG:
4066 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4067       /* If pointers extend unsigned and this is a pointer in Pmode, say that
4068 	 all the bits above ptr_mode are known to be zero.  */
4069       /* As we do not know which address space the pointer is referring to,
4070 	 we can do this only if the target does not support different pointer
4071 	 or address modes depending on the address space.  */
4072       if (target_default_pointer_address_modes_p ()
4073 	  && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4074 	  && REG_POINTER (x))
4075 	nonzero &= GET_MODE_MASK (ptr_mode);
4076 #endif
4077 
4078       /* Include declared information about alignment of pointers.  */
4079       /* ??? We don't properly preserve REG_POINTER changes across
4080 	 pointer-to-integer casts, so we can't trust it except for
4081 	 things that we know must be pointers.  See execute/960116-1.c.  */
4082       if ((x == stack_pointer_rtx
4083 	   || x == frame_pointer_rtx
4084 	   || x == arg_pointer_rtx)
4085 	  && REGNO_POINTER_ALIGN (REGNO (x)))
4086 	{
4087 	  unsigned HOST_WIDE_INT alignment
4088 	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4089 
4090 #ifdef PUSH_ROUNDING
4091 	  /* If PUSH_ROUNDING is defined, it is possible for the
4092 	     stack to be momentarily aligned only to that amount,
4093 	     so we pick the least alignment.  */
4094 	  if (x == stack_pointer_rtx && PUSH_ARGS)
4095 	    alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
4096 			     alignment);
4097 #endif
4098 
4099 	  nonzero &= ~(alignment - 1);
4100 	}
4101 
4102       {
4103 	unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4104 	rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4105 					      known_mode, known_ret,
4106 					      &nonzero_for_hook);
4107 
4108 	if (new_rtx)
4109 	  nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4110 						   known_mode, known_ret);
4111 
4112 	return nonzero_for_hook;
4113       }
4114 
4115     case CONST_INT:
4116 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4117       /* If X is negative in MODE, sign-extend the value.  */
4118       if (INTVAL (x) > 0
4119 	  && mode_width < BITS_PER_WORD
4120 	  && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4121 	     != 0)
4122 	return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
4123 #endif
4124 
4125       return UINTVAL (x);
4126 
4127     case MEM:
4128 #ifdef LOAD_EXTEND_OP
4129       /* In many, if not most, RISC machines, reading a byte from memory
4130 	 zeros the rest of the register.  Noticing that fact saves a lot
4131 	 of extra zero-extends.  */
4132       if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4133 	nonzero &= GET_MODE_MASK (GET_MODE (x));
4134 #endif
4135       break;
4136 
4137     case EQ:  case NE:
4138     case UNEQ:  case LTGT:
4139     case GT:  case GTU:  case UNGT:
4140     case LT:  case LTU:  case UNLT:
4141     case GE:  case GEU:  case UNGE:
4142     case LE:  case LEU:  case UNLE:
4143     case UNORDERED: case ORDERED:
4144       /* If this produces an integer result, we know which bits are set.
4145 	 Code here used to clear bits outside the mode of X, but that is
4146 	 now done above.  */
4147       /* Mind that MODE is the mode the caller wants to look at this
4148 	 operation in, and not the actual operation mode.  We can wind
4149 	 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4150 	 that describes the results of a vector compare.  */
4151       if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
4152 	  && mode_width <= HOST_BITS_PER_WIDE_INT)
4153 	nonzero = STORE_FLAG_VALUE;
4154       break;
4155 
4156     case NEG:
4157 #if 0
4158       /* Disabled to avoid exponential mutual recursion between nonzero_bits
4159 	 and num_sign_bit_copies.  */
4160       if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4161 	  == GET_MODE_PRECISION (GET_MODE (x)))
4162 	nonzero = 1;
4163 #endif
4164 
4165       if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
4166 	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4167       break;
4168 
4169     case ABS:
4170 #if 0
4171       /* Disabled to avoid exponential mutual recursion between nonzero_bits
4172 	 and num_sign_bit_copies.  */
4173       if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4174 	  == GET_MODE_PRECISION (GET_MODE (x)))
4175 	nonzero = 1;
4176 #endif
4177       break;
4178 
4179     case TRUNCATE:
4180       nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4181 				       known_x, known_mode, known_ret)
4182 		  & GET_MODE_MASK (mode));
4183       break;
4184 
4185     case ZERO_EXTEND:
4186       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4187 				      known_x, known_mode, known_ret);
4188       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4189 	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4190       break;
4191 
4192     case SIGN_EXTEND:
4193       /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4194 	 Otherwise, show that all the bits in the outer mode but not in the
4195 	 inner mode may be nonzero.  */
4196       inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4197 				      known_x, known_mode, known_ret);
4198       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4199 	{
4200 	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4201 	  if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4202 	    inner_nz |= (GET_MODE_MASK (mode)
4203 			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4204 	}
4205 
4206       nonzero &= inner_nz;
4207       break;
4208 
4209     case AND:
4210       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4211 				       known_x, known_mode, known_ret)
4212       		 & cached_nonzero_bits (XEXP (x, 1), mode,
4213 					known_x, known_mode, known_ret);
4214       break;
4215 
4216     case XOR:   case IOR:
4217     case UMIN:  case UMAX:  case SMIN:  case SMAX:
4218       {
4219 	unsigned HOST_WIDE_INT nonzero0
4220 	   = cached_nonzero_bits (XEXP (x, 0), mode,
4221 				  known_x, known_mode, known_ret);
4222 
4223 	/* Don't call nonzero_bits for the second time if it cannot change
4224 	   anything.  */
4225 	if ((nonzero & nonzero0) != nonzero)
4226 	  nonzero &= nonzero0
4227       		     | cached_nonzero_bits (XEXP (x, 1), mode,
4228 					    known_x, known_mode, known_ret);
4229       }
4230       break;
4231 
4232     case PLUS:  case MINUS:
4233     case MULT:
4234     case DIV:   case UDIV:
4235     case MOD:   case UMOD:
4236       /* We can apply the rules of arithmetic to compute the number of
4237 	 high- and low-order zero bits of these operations.  We start by
4238 	 computing the width (position of the highest-order nonzero bit)
4239 	 and the number of low-order zero bits for each value.  */
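      /* For example, for (mult:SI X Y) with nonzero bits 0x0000ff00 for X
	 and 0x000000f0 for Y, we get width0 == 16, width1 == 8, low0 == 8
	 and low1 == 4, so result_width == 24 and result_low == 12, and
	 NONZERO is reduced to 0x00fff000.  */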
4240       {
4241 	unsigned HOST_WIDE_INT nz0
4242 	  = cached_nonzero_bits (XEXP (x, 0), mode,
4243 				 known_x, known_mode, known_ret);
4244 	unsigned HOST_WIDE_INT nz1
4245 	  = cached_nonzero_bits (XEXP (x, 1), mode,
4246 				 known_x, known_mode, known_ret);
4247 	int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
4248 	int width0 = floor_log2 (nz0) + 1;
4249 	int width1 = floor_log2 (nz1) + 1;
4250 	int low0 = floor_log2 (nz0 & -nz0);
4251 	int low1 = floor_log2 (nz1 & -nz1);
4252 	unsigned HOST_WIDE_INT op0_maybe_minusp
4253 	  = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4254 	unsigned HOST_WIDE_INT op1_maybe_minusp
4255 	  = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4256 	unsigned int result_width = mode_width;
4257 	int result_low = 0;
4258 
4259 	switch (code)
4260 	  {
4261 	  case PLUS:
4262 	    result_width = MAX (width0, width1) + 1;
4263 	    result_low = MIN (low0, low1);
4264 	    break;
4265 	  case MINUS:
4266 	    result_low = MIN (low0, low1);
4267 	    break;
4268 	  case MULT:
4269 	    result_width = width0 + width1;
4270 	    result_low = low0 + low1;
4271 	    break;
4272 	  case DIV:
4273 	    if (width1 == 0)
4274 	      break;
4275 	    if (!op0_maybe_minusp && !op1_maybe_minusp)
4276 	      result_width = width0;
4277 	    break;
4278 	  case UDIV:
4279 	    if (width1 == 0)
4280 	      break;
4281 	    result_width = width0;
4282 	    break;
4283 	  case MOD:
4284 	    if (width1 == 0)
4285 	      break;
4286 	    if (!op0_maybe_minusp && !op1_maybe_minusp)
4287 	      result_width = MIN (width0, width1);
4288 	    result_low = MIN (low0, low1);
4289 	    break;
4290 	  case UMOD:
4291 	    if (width1 == 0)
4292 	      break;
4293 	    result_width = MIN (width0, width1);
4294 	    result_low = MIN (low0, low1);
4295 	    break;
4296 	  default:
4297 	    gcc_unreachable ();
4298 	  }
4299 
4300 	if (result_width < mode_width)
4301 	  nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
4302 
4303 	if (result_low > 0)
4304 	  nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
4305       }
4306       break;
4307 
4308     case ZERO_EXTRACT:
4309       if (CONST_INT_P (XEXP (x, 1))
4310 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4311 	nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4312       break;
4313 
4314     case SUBREG:
4315       /* If this is a SUBREG formed for a promoted variable that has
4316 	 been zero-extended, we know that at least the high-order bits
4317 	 are zero, though others might be too.  */
4318 
4319       if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
4320 	nonzero = GET_MODE_MASK (GET_MODE (x))
4321 		  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4322 					 known_x, known_mode, known_ret);
4323 
4324       inner_mode = GET_MODE (SUBREG_REG (x));
4325       /* If the inner mode is a single word for both the host and target
4326 	 machines, we can compute this from which bits of the inner
4327 	 object might be nonzero.  */
4328       if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4329 	  && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
4330 	{
4331 	  nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4332 					  known_x, known_mode, known_ret);
4333 
4334 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4335 	  /* If this is a typical RISC machine, we only have to worry
4336 	     about the way loads are extended.  */
4337 	  if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4338 	       ? val_signbit_known_set_p (inner_mode, nonzero)
4339 	       : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
4340 	      || !MEM_P (SUBREG_REG (x)))
4341 #endif
4342 	    {
4343 	      /* On many CISC machines, accessing an object in a wider mode
4344 		 causes the high-order bits to become undefined.  So they are
4345 		 not known to be zero.  */
4346 	      if (GET_MODE_PRECISION (GET_MODE (x))
4347 		  > GET_MODE_PRECISION (inner_mode))
4348 		nonzero |= (GET_MODE_MASK (GET_MODE (x))
4349 			    & ~GET_MODE_MASK (inner_mode));
4350 	    }
4351 	}
4352       break;
4353 
4354     case ASHIFTRT:
4355     case LSHIFTRT:
4356     case ASHIFT:
4357     case ROTATE:
4358       /* The nonzero bits are in two classes: any bits within MODE
4359 	 that aren't in GET_MODE (x) are always significant.  The rest of the
4360 	 nonzero bits are those that are significant in the operand of
4361 	 the shift when shifted the appropriate number of bits.  This
4362 	 shows that high-order bits are cleared by the right shift and
4363 	 low-order bits by left shifts.  */
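      /* For example, for (ashiftrt:SI X (const_int 4)) where only X's sign
	 bit might be set (op_nonzero == 0x80000000), INNER becomes
	 0x08000000 after the shift and the sign bit is then propagated into
	 the top four positions, leaving 0xf8000000.  */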
4364       if (CONST_INT_P (XEXP (x, 1))
4365 	  && INTVAL (XEXP (x, 1)) >= 0
4366 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4367 	  && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4368 	{
4369 	  machine_mode inner_mode = GET_MODE (x);
4370 	  unsigned int width = GET_MODE_PRECISION (inner_mode);
4371 	  int count = INTVAL (XEXP (x, 1));
4372 	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4373 	  unsigned HOST_WIDE_INT op_nonzero
4374 	    = cached_nonzero_bits (XEXP (x, 0), mode,
4375 				   known_x, known_mode, known_ret);
4376 	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4377 	  unsigned HOST_WIDE_INT outer = 0;
4378 
4379 	  if (mode_width > width)
4380 	    outer = (op_nonzero & nonzero & ~mode_mask);
4381 
4382 	  if (code == LSHIFTRT)
4383 	    inner >>= count;
4384 	  else if (code == ASHIFTRT)
4385 	    {
4386 	      inner >>= count;
4387 
4388 	      /* If the sign bit may have been nonzero before the shift, we
4389 		 need to mark all the places it could have been copied to
4390 		 by the shift as possibly nonzero.  */
4391 	      if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4392 		inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4393 			   << (width - count);
4394 	    }
4395 	  else if (code == ASHIFT)
4396 	    inner <<= count;
4397 	  else
4398 	    inner = ((inner << (count % width)
4399 		      | (inner >> (width - (count % width)))) & mode_mask);
4400 
4401 	  nonzero &= (outer | inner);
4402 	}
4403       break;
4404 
4405     case FFS:
4406     case POPCOUNT:
4407       /* This is at most the number of bits in the mode.  */
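      /* For example, in SImode this yields 0x3f, enough to hold every
	 possible FFS or POPCOUNT result (0 through 32).  */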
4408       nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4409       break;
4410 
4411     case CLZ:
4412       /* If CLZ has a known value at zero, then the nonzero bits are
4413 	 that value, plus the number of bits in the mode minus one.  */
4414       if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4415 	nonzero
4416 	  |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4417       else
4418 	nonzero = -1;
4419       break;
4420 
4421     case CTZ:
4422       /* If CTZ has a known value at zero, then the nonzero bits are
4423 	 that value, plus the number of bits in the mode minus one.  */
4424       if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4425 	nonzero
4426 	  |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4427       else
4428 	nonzero = -1;
4429       break;
4430 
4431     case CLRSB:
4432       /* This is at most the number of bits in the mode minus 1.  */
4433       nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4434       break;
4435 
4436     case PARITY:
4437       nonzero = 1;
4438       break;
4439 
4440     case IF_THEN_ELSE:
4441       {
4442 	unsigned HOST_WIDE_INT nonzero_true
4443 	  = cached_nonzero_bits (XEXP (x, 1), mode,
4444 				 known_x, known_mode, known_ret);
4445 
4446 	/* Don't call nonzero_bits for the second time if it cannot change
4447 	   anything.  */
4448 	if ((nonzero & nonzero_true) != nonzero)
4449 	  nonzero &= nonzero_true
4450       		     | cached_nonzero_bits (XEXP (x, 2), mode,
4451 					    known_x, known_mode, known_ret);
4452       }
4453       break;
4454 
4455     default:
4456       break;
4457     }
4458 
4459   return nonzero;
4460 }
4461 
4462 /* See the macro definition above.  */
4463 #undef cached_num_sign_bit_copies
4464 
4465 
4466 /* The function cached_num_sign_bit_copies is a wrapper around
4467    num_sign_bit_copies1.  It avoids exponential behavior in
4468    num_sign_bit_copies1 when X has identical subexpressions on the
4469    first or the second level.  */
4470 
4471 static unsigned int
4472 cached_num_sign_bit_copies (const_rtx x, machine_mode mode, const_rtx known_x,
4473 			    machine_mode known_mode,
4474 			    unsigned int known_ret)
4475 {
4476   if (x == known_x && mode == known_mode)
4477     return known_ret;
4478 
4479   /* Try to find identical subexpressions.  If found call
4480      num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4481      the precomputed value for the subexpression as KNOWN_RET.  */
4482 
4483   if (ARITHMETIC_P (x))
4484     {
4485       rtx x0 = XEXP (x, 0);
4486       rtx x1 = XEXP (x, 1);
4487 
4488       /* Check the first level.  */
4489       if (x0 == x1)
4490 	return
4491 	  num_sign_bit_copies1 (x, mode, x0, mode,
4492 				cached_num_sign_bit_copies (x0, mode, known_x,
4493 							    known_mode,
4494 							    known_ret));
4495 
4496       /* Check the second level.  */
4497       if (ARITHMETIC_P (x0)
4498 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4499 	return
4500 	  num_sign_bit_copies1 (x, mode, x1, mode,
4501 				cached_num_sign_bit_copies (x1, mode, known_x,
4502 							    known_mode,
4503 							    known_ret));
4504 
4505       if (ARITHMETIC_P (x1)
4506 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4507 	return
4508 	  num_sign_bit_copies1 (x, mode, x0, mode,
4509 				cached_num_sign_bit_copies (x0, mode, known_x,
4510 							    known_mode,
4511 							    known_ret));
4512     }
4513 
4514   return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4515 }
4516 
4517 /* Return the number of bits at the high-order end of X that are known to
4518    be equal to the sign bit.  X will be used in mode MODE; if MODE is
4519    VOIDmode, X will be used in its own mode.  The returned value will always
4520    be between 1 and the number of bits in MODE.  */
4521 
4522 static unsigned int
4523 num_sign_bit_copies1 (const_rtx x, machine_mode mode, const_rtx known_x,
4524 		      machine_mode known_mode,
4525 		      unsigned int known_ret)
4526 {
4527   enum rtx_code code = GET_CODE (x);
4528   unsigned int bitwidth = GET_MODE_PRECISION (mode);
4529   int num0, num1, result;
4530   unsigned HOST_WIDE_INT nonzero;
4531 
4532   /* If we weren't given a mode, use the mode of X.  If the mode is still
4533      VOIDmode, we don't know anything.  Likewise if one of the modes is
4534      floating-point.  */
4535 
4536   if (mode == VOIDmode)
4537     mode = GET_MODE (x);
4538 
4539   if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4540       || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4541     return 1;
4542 
4543   /* For a smaller object, just ignore the high bits.  */
4544   if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
4545     {
4546       num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4547 					 known_x, known_mode, known_ret);
4548       return MAX (1,
4549 		  num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
4550     }
4551 
4552   if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
4553     {
4554 #ifndef WORD_REGISTER_OPERATIONS
4555       /* If this machine does not do all register operations on the entire
4556 	 register and MODE is wider than the mode of X, we can say nothing
4557 	 at all about the high-order bits.  */
4558       return 1;
4559 #else
4560       /* Likewise on machines that do, if the mode of the object is smaller
4561 	 than a word and loads of that size don't sign extend, we can say
4562 	 nothing about the high order bits.  */
4563       if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
4564 #ifdef LOAD_EXTEND_OP
4565 	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4566 #endif
4567 	  )
4568 	return 1;
4569 #endif
4570     }
4571 
4572   switch (code)
4573     {
4574     case REG:
4575 
4576 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4577       /* If pointers extend signed and this is a pointer in Pmode, say that
4578 	 all the bits above ptr_mode are known to be sign bit copies.  */
4579       /* As we do not know which address space the pointer is referring to,
4580 	 we can do this only if the target does not support different pointer
4581 	 or address modes depending on the address space.  */
4582       if (target_default_pointer_address_modes_p ()
4583 	  && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4584 	  && mode == Pmode && REG_POINTER (x))
4585 	return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
4586 #endif
4587 
4588       {
4589 	unsigned int copies_for_hook = 1, copies = 1;
4590 	rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4591 						     known_mode, known_ret,
4592 						     &copies_for_hook);
4593 
4594 	if (new_rtx)
4595 	  copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4596 					       known_mode, known_ret);
4597 
4598 	if (copies > 1 || copies_for_hook > 1)
4599 	  return MAX (copies, copies_for_hook);
4600 
4601 	/* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
4602       }
4603       break;
4604 
4605     case MEM:
4606 #ifdef LOAD_EXTEND_OP
4607       /* Some RISC machines sign-extend all loads smaller than a word.  */
4608       if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4609 	return MAX (1, ((int) bitwidth
4610 			- (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
4611 #endif
4612       break;
4613 
4614     case CONST_INT:
4615       /* If the constant is negative, take its 1's complement and remask.
4616 	 Then see how many zero bits we have.  */
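      /* For example, (const_int -4) in SImode gives NONZERO == 3 after the
	 complement, so we return 32 - floor_log2 (3) - 1 == 30: bits 31
	 through 2 are all copies of the sign bit.  */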
4617       nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
4618       if (bitwidth <= HOST_BITS_PER_WIDE_INT
4619 	  && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4620 	nonzero = (~nonzero) & GET_MODE_MASK (mode);
4621 
4622       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4623 
4624     case SUBREG:
4625       /* If this is a SUBREG for a promoted object that is sign-extended
4626 	 and we are looking at it in a wider mode, we know that at least
4627 	 the high-order bits are sign bit copies.  */
4628 
4629       if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
4630 	{
4631 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4632 					     known_x, known_mode, known_ret);
4633 	  return MAX ((int) bitwidth
4634 		      - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
4635 		      num0);
4636 	}
4637 
4638       /* For a smaller object, just ignore the high bits.  */
4639       if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
4640 	{
4641 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4642 					     known_x, known_mode, known_ret);
4643 	  return MAX (1, (num0
4644 			  - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
4645 				   - bitwidth)));
4646 	}
4647 
4648 #ifdef WORD_REGISTER_OPERATIONS
4649 #ifdef LOAD_EXTEND_OP
4650       /* For paradoxical SUBREGs on machines where all register operations
4651 	 affect the entire register, just look inside.  Note that we are
4652 	 passing MODE to the recursive call, so the number of sign bit copies
4653 	 will remain relative to that mode, not the inner mode.  */
4654 
4655       /* This works only if loads sign extend.  Otherwise, if we get a
4656 	 reload for the inner part, it may be loaded from the stack, and
4657 	 then we lose all sign bit copies that existed before the store
4658 	 to the stack.  */
4659 
4660       if (paradoxical_subreg_p (x)
4661 	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4662 	  && MEM_P (SUBREG_REG (x)))
4663 	return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4664 					   known_x, known_mode, known_ret);
4665 #endif
4666 #endif
4667       break;
4668 
4669     case SIGN_EXTRACT:
4670       if (CONST_INT_P (XEXP (x, 1)))
4671 	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4672       break;
4673 
4674     case SIGN_EXTEND:
4675       return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4676 	      + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4677 					    known_x, known_mode, known_ret));
4678 
4679     case TRUNCATE:
4680       /* For a smaller object, just ignore the high bits.  */
4681       num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4682 					 known_x, known_mode, known_ret);
4683       return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4684 				    - bitwidth)));
4685 
4686     case NOT:
4687       return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4688 					 known_x, known_mode, known_ret);
4689 
4690     case ROTATE:       case ROTATERT:
4691       /* If we are rotating left by a number of bits less than the number
4692 	 of sign bit copies, we can just subtract that amount from the
4693 	 number.  */
4694       if (CONST_INT_P (XEXP (x, 1))
4695 	  && INTVAL (XEXP (x, 1)) >= 0
4696 	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4697 	{
4698 	  num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4699 					     known_x, known_mode, known_ret);
4700 	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4701 				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4702 	}
4703       break;
4704 
4705     case NEG:
4706       /* In general, this subtracts one sign bit copy.  But if the value
4707 	 is known to be positive, the number of sign bit copies is the
4708 	 same as that of the input.  Finally, if the input has just one bit
4709 	 that might be nonzero, all the bits are copies of the sign bit.  */
4710       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4711 					 known_x, known_mode, known_ret);
4712       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4713 	return num0 > 1 ? num0 - 1 : 1;
4714 
4715       nonzero = nonzero_bits (XEXP (x, 0), mode);
4716       if (nonzero == 1)
4717 	return bitwidth;
4718 
4719       if (num0 > 1
4720 	  && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4721 	num0--;
4722 
4723       return num0;
4724 
4725     case IOR:   case AND:   case XOR:
4726     case SMIN:  case SMAX:  case UMIN:  case UMAX:
4727       /* Logical operations will preserve the number of sign-bit copies.
4728 	 MIN and MAX operations always return one of the operands.  */
4729       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4730 					 known_x, known_mode, known_ret);
4731       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4732 					 known_x, known_mode, known_ret);
4733 
4734       /* If num1 is clearing some of the top bits then regardless of
4735 	 the other term, we are guaranteed to have at least that many
4736 	 high-order zero bits.  */
4737       if (code == AND
4738 	  && num1 > 1
4739 	  && bitwidth <= HOST_BITS_PER_WIDE_INT
4740 	  && CONST_INT_P (XEXP (x, 1))
4741 	  && (UINTVAL (XEXP (x, 1))
4742 	      & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
4743 	return num1;
4744 
4745       /* Similarly for IOR when setting high-order bits.  */
4746       if (code == IOR
4747 	  && num1 > 1
4748 	  && bitwidth <= HOST_BITS_PER_WIDE_INT
4749 	  && CONST_INT_P (XEXP (x, 1))
4750 	  && (UINTVAL (XEXP (x, 1))
4751 	      & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4752 	return num1;
4753 
4754       return MIN (num0, num1);
4755 
4756     case PLUS:  case MINUS:
4757       /* For addition and subtraction, we can have a 1-bit carry.  However,
4758 	 if we are subtracting 1 from a positive number, there will not
4759 	 be such a carry.  Furthermore, if the positive number is known to
4760 	 be 0 or 1, we know the result is either -1 or 0.  */
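      /* In the general case below the result has at least
	 MIN (num0, num1) - 1 sign bit copies; e.g. adding two SImode values
	 that each have 20 sign bit copies leaves at least 19, since the sum
	 can grow by at most one bit.  */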
4761 
4762       if (code == PLUS && XEXP (x, 1) == constm1_rtx
4763 	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
4764 	{
4765 	  nonzero = nonzero_bits (XEXP (x, 0), mode);
4766 	  if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4767 	    return (nonzero == 1 || nonzero == 0 ? bitwidth
4768 		    : bitwidth - floor_log2 (nonzero) - 1);
4769 	}
4770 
4771       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4772 					 known_x, known_mode, known_ret);
4773       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4774 					 known_x, known_mode, known_ret);
4775       result = MAX (1, MIN (num0, num1) - 1);
4776 
4777       return result;
4778 
4779     case MULT:
4780       /* The number of bits of the product is the sum of the number of
4781 	 bits of both terms.  However, unless one of the terms is known
4782 	 to be positive, we must allow for an additional bit since negating
4783 	 a negative number can remove one sign bit copy.  */
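      /* For example, multiplying two SImode values that each have 20 sign
	 bit copies (so each fits in 12 bits plus sign) gives
	 32 - 12 - 12 == 8 copies, reduced to 7 below unless one operand is
	 known to be nonnegative.  */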
4784 
4785       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4786 					 known_x, known_mode, known_ret);
4787       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4788 					 known_x, known_mode, known_ret);
4789 
4790       result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4791       if (result > 0
4792 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4793 	      || (((nonzero_bits (XEXP (x, 0), mode)
4794 		    & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4795 		  && ((nonzero_bits (XEXP (x, 1), mode)
4796 		       & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
4797 		      != 0))))
4798 	result--;
4799 
4800       return MAX (1, result);
4801 
4802     case UDIV:
4803       /* The result must be <= the first operand.  If the first operand
4804 	 has the high bit set, we know nothing about the number of sign
4805 	 bit copies.  */
4806       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4807 	return 1;
4808       else if ((nonzero_bits (XEXP (x, 0), mode)
4809 		& ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4810 	return 1;
4811       else
4812 	return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4813 					   known_x, known_mode, known_ret);
4814 
4815     case UMOD:
4816       /* The result must be <= the second operand.  If the second operand
4817 	 has (or just might have) the high bit set, we know nothing about
4818 	 the number of sign bit copies.  */
4819       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4820 	return 1;
4821       else if ((nonzero_bits (XEXP (x, 1), mode)
4822 		& ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4823 	return 1;
4824       else
4825 	return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4826 					   known_x, known_mode, known_ret);
4827 
4828     case DIV:
4829       /* Similar to unsigned division, except that we have to worry about
4830 	 the case where the divisor is negative, in which case the result
4831 	 may lose one sign bit copy.  */
4832       result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4833 					   known_x, known_mode, known_ret);
4834       if (result > 1
4835 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4836 	      || (nonzero_bits (XEXP (x, 1), mode)
4837 		  & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4838 	result--;
4839 
4840       return result;
4841 
4842     case MOD:
4843       result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4844 					   known_x, known_mode, known_ret);
4845       if (result > 1
4846 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4847 	      || (nonzero_bits (XEXP (x, 1), mode)
4848 		  & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4849 	result--;
4850 
4851       return result;
4852 
4853     case ASHIFTRT:
4854       /* Shifts by a constant add to the number of bits equal to the
4855 	 sign bit.  */
4856       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4857 					 known_x, known_mode, known_ret);
4858       if (CONST_INT_P (XEXP (x, 1))
4859 	  && INTVAL (XEXP (x, 1)) > 0
4860 	  && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4861 	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4862 
4863       return num0;
4864 
4865     case ASHIFT:
4866       /* Left shifts destroy copies.  */
4867       if (!CONST_INT_P (XEXP (x, 1))
4868 	  || INTVAL (XEXP (x, 1)) < 0
4869 	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4870 	  || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
4871 	return 1;
4872 
4873       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4874 					 known_x, known_mode, known_ret);
4875       return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4876 
4877     case IF_THEN_ELSE:
4878       num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4879 					 known_x, known_mode, known_ret);
4880       num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4881 					 known_x, known_mode, known_ret);
4882       return MIN (num0, num1);
4883 
4884     case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
4885     case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
4886     case GEU: case GTU: case LEU: case LTU:
4887     case UNORDERED: case ORDERED:
4888       /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
4889 	 Then see how many zero bits we have.  */
4890       nonzero = STORE_FLAG_VALUE;
4891       if (bitwidth <= HOST_BITS_PER_WIDE_INT
4892 	  && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4893 	nonzero = (~nonzero) & GET_MODE_MASK (mode);
4894 
4895       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4896 
4897     default:
4898       break;
4899     }
4900 
4901   /* If we haven't been able to figure it out by one of the above rules,
4902      see if some of the high-order bits are known to be zero.  If so,
4903      count those bits and return one less than that amount.  If we can't
4904      safely compute the mask for this mode, always return BITWIDTH.  */
4905 
4906   bitwidth = GET_MODE_PRECISION (mode);
4907   if (bitwidth > HOST_BITS_PER_WIDE_INT)
4908     return 1;
4909 
4910   nonzero = nonzero_bits (x, mode);
4911   return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
4912 	 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4913 }
4914 
4915 /* Calculate the rtx_cost of a single instruction.  A return value of
4916    zero indicates an instruction pattern without a known cost.  */
4917 
4918 int
4919 insn_rtx_cost (rtx pat, bool speed)
4920 {
4921   int i, cost;
4922   rtx set;
4923 
4924   /* Extract the single set rtx from the instruction pattern.
4925      We can't use single_set since we only have the pattern.  */
4926   if (GET_CODE (pat) == SET)
4927     set = pat;
4928   else if (GET_CODE (pat) == PARALLEL)
4929     {
4930       set = NULL_RTX;
4931       for (i = 0; i < XVECLEN (pat, 0); i++)
4932 	{
4933 	  rtx x = XVECEXP (pat, 0, i);
4934 	  if (GET_CODE (x) == SET)
4935 	    {
4936 	      if (set)
4937 		return 0;
4938 	      set = x;
4939 	    }
4940 	}
4941       if (!set)
4942 	return 0;
4943     }
4944   else
4945     return 0;
4946 
4947   cost = set_src_cost (SET_SRC (set), speed);
4948   return cost > 0 ? cost : COSTS_N_INSNS (1);
4949 }
4950 
4951 /* Return an estimate of the cost of computing SEQ.  */
4952 
4953 unsigned
4954 seq_cost (const rtx_insn *seq, bool speed)
4955 {
4956   unsigned cost = 0;
4957   rtx set;
4958 
4959   for (; seq; seq = NEXT_INSN (seq))
4960     {
4961       set = single_set (seq);
4962       if (set)
4963         cost += set_rtx_cost (set, speed);
4964       else
4965         cost++;
4966     }
4967 
4968   return cost;
4969 }
4970 
4971 /* Given an insn INSN and condition COND, return the condition in a
4972    canonical form to simplify testing by callers.  Specifically:
4973 
4974    (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4975    (2) Both operands will be machine operands; (cc0) will have been replaced.
4976    (3) If an operand is a constant, it will be the second operand.
4977    (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4978        for GE, GEU, and LEU.
4979 
4980    If the condition cannot be understood, or is an inequality floating-point
4981    comparison which needs to be reversed, 0 will be returned.
4982 
4983    If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4984 
4985    If EARLIEST is nonzero, it is a pointer to a place where the earliest
4986    insn used in locating the condition was found.  If a replacement test
4987    of the condition is desired, it should be placed in front of that
4988    insn and we will be sure that the inputs are still valid.
4989 
4990    If WANT_REG is nonzero, we wish the condition to be relative to that
4991    register, if possible.  Therefore, do not canonicalize the condition
4992    further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
4993    to be a compare to a CC mode register.
4994 
4995    If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4996    and at INSN.  */
4997 
4998 rtx
4999 canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
5000 			rtx_insn **earliest,
5001 			rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
5002 {
5003   enum rtx_code code;
5004   rtx_insn *prev = insn;
5005   const_rtx set;
5006   rtx tem;
5007   rtx op0, op1;
5008   int reverse_code = 0;
5009   machine_mode mode;
5010   basic_block bb = BLOCK_FOR_INSN (insn);
5011 
5012   code = GET_CODE (cond);
5013   mode = GET_MODE (cond);
5014   op0 = XEXP (cond, 0);
5015   op1 = XEXP (cond, 1);
5016 
5017   if (reverse)
5018     code = reversed_comparison_code (cond, insn);
5019   if (code == UNKNOWN)
5020     return 0;
5021 
5022   if (earliest)
5023     *earliest = insn;
5024 
5025   /* If we are comparing a register with zero, see if the register is set
5026      in the previous insn to a COMPARE or a comparison operation.  Perform
5027      the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
5028      in cse.c  */
5029 
5030   while ((GET_RTX_CLASS (code) == RTX_COMPARE
5031 	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
5032 	 && op1 == CONST0_RTX (GET_MODE (op0))
5033 	 && op0 != want_reg)
5034     {
5035       /* Set nonzero when we find something of interest.  */
5036       rtx x = 0;
5037 
5038 #ifdef HAVE_cc0
5039       /* If comparison with cc0, import actual comparison from compare
5040 	 insn.  */
5041       if (op0 == cc0_rtx)
5042 	{
5043 	  if ((prev = prev_nonnote_insn (prev)) == 0
5044 	      || !NONJUMP_INSN_P (prev)
5045 	      || (set = single_set (prev)) == 0
5046 	      || SET_DEST (set) != cc0_rtx)
5047 	    return 0;
5048 
5049 	  op0 = SET_SRC (set);
5050 	  op1 = CONST0_RTX (GET_MODE (op0));
5051 	  if (earliest)
5052 	    *earliest = prev;
5053 	}
5054 #endif
5055 
5056       /* If this is a COMPARE, pick up the two things being compared.  */
5057       if (GET_CODE (op0) == COMPARE)
5058 	{
5059 	  op1 = XEXP (op0, 1);
5060 	  op0 = XEXP (op0, 0);
5061 	  continue;
5062 	}
5063       else if (!REG_P (op0))
5064 	break;
5065 
5066       /* Go back to the previous insn.  Stop if it is not an INSN.  We also
5067 	 stop if it isn't a single set or if it has a REG_INC note because
5068 	 we don't want to bother dealing with it.  */
5069 
5070       prev = prev_nonnote_nondebug_insn (prev);
5071 
5072       if (prev == 0
5073 	  || !NONJUMP_INSN_P (prev)
5074 	  || FIND_REG_INC_NOTE (prev, NULL_RTX)
5075 	  /* In cfglayout mode, there do not have to be labels at the
5076 	     beginning of a block, or jumps at the end, so the previous
5077 	     conditions would not stop us when we reach bb boundary.  */
5078 	  || BLOCK_FOR_INSN (prev) != bb)
5079 	break;
5080 
5081       set = set_of (op0, prev);
5082 
5083       if (set
5084 	  && (GET_CODE (set) != SET
5085 	      || !rtx_equal_p (SET_DEST (set), op0)))
5086 	break;
5087 
5088       /* If this is setting OP0, get what it sets it to if it looks
5089 	 relevant.  */
5090       if (set)
5091 	{
5092 	  machine_mode inner_mode = GET_MODE (SET_DEST (set));
5093 #ifdef FLOAT_STORE_FLAG_VALUE
5094 	  REAL_VALUE_TYPE fsfv;
5095 #endif
5096 
5097 	  /* ??? We may not combine comparisons done in a CCmode with
5098 	     comparisons not done in a CCmode.  This is to aid targets
5099 	     like Alpha that have an IEEE compliant EQ instruction, and
5100 	     a non-IEEE compliant BEQ instruction.  The use of CCmode is
5101 	     actually artificial, simply to prevent the combination, but
5102 	     should not affect other platforms.
5103 
5104 	     However, we must allow VOIDmode comparisons to match either
5105 	     CCmode or non-CCmode comparison, because some ports have
5106 	     modeless comparisons inside branch patterns.
5107 
5108 	     ??? This mode check should perhaps look more like the mode check
5109 	     in simplify_comparison in combine.  */
5110 	  if (((GET_MODE_CLASS (mode) == MODE_CC)
5111 	       != (GET_MODE_CLASS (inner_mode) == MODE_CC))
5112 	      && mode != VOIDmode
5113 	      && inner_mode != VOIDmode)
5114 	    break;
5115 	  if (GET_CODE (SET_SRC (set)) == COMPARE
5116 	      || (((code == NE
5117 		    || (code == LT
5118 			&& val_signbit_known_set_p (inner_mode,
5119 						    STORE_FLAG_VALUE))
5120 #ifdef FLOAT_STORE_FLAG_VALUE
5121 		    || (code == LT
5122 			&& SCALAR_FLOAT_MODE_P (inner_mode)
5123 			&& (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5124 			    REAL_VALUE_NEGATIVE (fsfv)))
5125 #endif
5126 		    ))
5127 		  && COMPARISON_P (SET_SRC (set))))
5128 	    x = SET_SRC (set);
5129 	  else if (((code == EQ
5130 		     || (code == GE
5131 			 && val_signbit_known_set_p (inner_mode,
5132 						     STORE_FLAG_VALUE))
5133 #ifdef FLOAT_STORE_FLAG_VALUE
5134 		     || (code == GE
5135 			 && SCALAR_FLOAT_MODE_P (inner_mode)
5136 			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5137 			     REAL_VALUE_NEGATIVE (fsfv)))
5138 #endif
5139 		     ))
5140 		   && COMPARISON_P (SET_SRC (set)))
5141 	    {
5142 	      reverse_code = 1;
5143 	      x = SET_SRC (set);
5144 	    }
5145 	  else if ((code == EQ || code == NE)
5146 		   && GET_CODE (SET_SRC (set)) == XOR)
5147 	    /* Handle sequences like:
5148 
5149 	       (set op0 (xor X Y))
5150 	       ...(eq|ne op0 (const_int 0))...
5151 
5152 	       in which case:
5153 
5154 	       (eq op0 (const_int 0)) reduces to (eq X Y)
5155 	       (ne op0 (const_int 0)) reduces to (ne X Y)
5156 
5157 	       This is the form used by MIPS16, for example.  */
5158 	    x = SET_SRC (set);
5159 	  else
5160 	    break;
5161 	}
5162 
5163       else if (reg_set_p (op0, prev))
5164 	/* If this sets OP0, but not directly, we have to give up.  */
5165 	break;
5166 
5167       if (x)
5168 	{
5169 	  /* If the caller is expecting the condition to be valid at INSN,
5170 	     make sure X doesn't change before INSN.  */
5171 	  if (valid_at_insn_p)
5172 	    if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
5173 	      break;
5174 	  if (COMPARISON_P (x))
5175 	    code = GET_CODE (x);
5176 	  if (reverse_code)
5177 	    {
5178 	      code = reversed_comparison_code (x, prev);
5179 	      if (code == UNKNOWN)
5180 		return 0;
5181 	      reverse_code = 0;
5182 	    }
5183 
5184 	  op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5185 	  if (earliest)
5186 	    *earliest = prev;
5187 	}
5188     }
5189 
5190   /* If constant is first, put it last.  */
5191   if (CONSTANT_P (op0))
5192     code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
5193 
5194   /* If OP0 is the result of a comparison, we weren't able to find what
5195      was really being compared, so fail.  */
5196   if (!allow_cc_mode
5197       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5198     return 0;
5199 
5200   /* Canonicalize any ordered comparison with integers involving equality
5201      if we can do computations in the relevant mode and we do not
5202      overflow.  */
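  /* For example, (le X (const_int 9)) becomes (lt X (const_int 10)) and
     (geu X (const_int 1)) becomes (gtu X (const_int 0)), provided the
     adjusted constant does not overflow the mode.  */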
5203 
5204   if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
5205       && CONST_INT_P (op1)
5206       && GET_MODE (op0) != VOIDmode
5207       && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
5208     {
5209       HOST_WIDE_INT const_val = INTVAL (op1);
5210       unsigned HOST_WIDE_INT uconst_val = const_val;
5211       unsigned HOST_WIDE_INT max_val
5212 	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
5213 
5214       switch (code)
5215 	{
5216 	case LE:
5217 	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
5218 	    code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
5219 	  break;
5220 
5221 	/* When cross-compiling, const_val might be sign-extended from
5222 	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
5223 	case GE:
5224 	  if ((const_val & max_val)
5225 	      != ((unsigned HOST_WIDE_INT) 1
5226 		  << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
5227 	    code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
5228 	  break;
5229 
5230 	case LEU:
5231 	  if (uconst_val < max_val)
5232 	    code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
5233 	  break;
5234 
5235 	case GEU:
5236 	  if (uconst_val != 0)
5237 	    code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
5238 	  break;
5239 
5240 	default:
5241 	  break;
5242 	}
5243     }
5244 
5245   /* Never return CC0; return zero instead.  */
5246   if (CC0_P (op0))
5247     return 0;
5248 
5249   return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
5250 }
5251 
5252 /* Given a jump insn JUMP, return the condition that will cause it to branch
5253    to its JUMP_LABEL.  If the condition cannot be understood, or is an
5254    inequality floating-point comparison which needs to be reversed, 0 will
5255    be returned.
5256 
5257    If EARLIEST is nonzero, it is a pointer to a place where the earliest
5258    insn used in locating the condition was found.  If a replacement test
5259    of the condition is desired, it should be placed in front of that
5260    insn and we will be sure that the inputs are still valid.  If EARLIEST
5261    is null, the returned condition will be valid at INSN.
5262 
5263    If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
5264    compare CC mode register.
5265 
5266    VALID_AT_INSN_P is the same as for canonicalize_condition.  */
5267 
5268 rtx
5269 get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
5270 	       int valid_at_insn_p)
5271 {
5272   rtx cond;
5273   int reverse;
5274   rtx set;
5275 
5276   /* If this is not a standard conditional jump, we can't parse it.  */
5277   if (!JUMP_P (jump)
5278       || ! any_condjump_p (jump))
5279     return 0;
5280   set = pc_set (jump);
5281 
5282   cond = XEXP (SET_SRC (set), 0);
5283 
5284   /* If this branches to JUMP_LABEL when the condition is false, reverse
5285      the condition.  */
5286   reverse
5287     = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
5288       && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
5289 
5290   return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5291 				 allow_cc_mode, valid_at_insn_p);
5292 }
5293 
5294 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5295    TARGET_MODE_REP_EXTENDED.
5296 
5297    Note that we assume that the property of
5298    TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
5299    narrower than mode B.  I.e., if A is a mode narrower than B then in
5300    order to be able to operate on it in mode B, mode A needs to
5301    satisfy the requirements set by the representation of mode B.  */
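/* For example, on a target where SImode is 32 bits wide, DImode is 64 bits
   wide and TARGET_MODE_REP_EXTENDED (SImode, DImode) is SIGN_EXTEND,
   NUM_SIGN_BIT_COPIES_IN_REP[DImode][SImode] ends up as 32.  */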
5302 
5303 static void
5304 init_num_sign_bit_copies_in_rep (void)
5305 {
5306   machine_mode mode, in_mode;
5307 
5308   for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5309        in_mode = GET_MODE_WIDER_MODE (mode))
5310     for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5311 	 mode = GET_MODE_WIDER_MODE (mode))
5312       {
5313 	machine_mode i;
5314 
5315 	/* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5316 	   extends to the next widest mode.  */
5317 	gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5318 		    || GET_MODE_WIDER_MODE (mode) == in_mode);
5319 
5320 	/* We are in in_mode.  Count how many bits outside of mode
5321 	   have to be copies of the sign-bit.  */
5322 	for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5323 	  {
5324 	    machine_mode wider = GET_MODE_WIDER_MODE (i);
5325 
5326 	    if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5327 		/* We can only check sign-bit copies starting from the
5328 		   top-bit.  In order to be able to check the bits we
5329 		   have already seen we pretend that subsequent bits
5330 		   have to be sign-bit copies too.  */
5331 		|| num_sign_bit_copies_in_rep [in_mode][mode])
5332 	      num_sign_bit_copies_in_rep [in_mode][mode]
5333 		+= GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
5334 	  }
5335       }
5336 }
5337 
5338 /* Suppose that truncation from the machine mode of X to MODE is not a
5339    no-op.  See if there is anything special about X so that we can
5340    assume it already contains a truncated value of MODE.  */
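/* For example, if NUM_SIGN_BIT_COPIES_IN_REP[DImode][SImode] is 32, then a
   DImode X with at least 33 sign-bit copies already contains a value
   truncated to SImode, so no explicit truncation is needed.  */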
5341 
5342 bool
5343 truncated_to_mode (machine_mode mode, const_rtx x)
5344 {
5345   /* This register has already been used in MODE without explicit
5346      truncation.  */
5347   if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5348     return true;
5349 
5350   /* See if we already satisfy the requirements of MODE.  If yes we
5351      can just switch to MODE.  */
5352   if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5353       && (num_sign_bit_copies (x, GET_MODE (x))
5354 	  >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5355     return true;
5356 
5357   return false;
5358 }
5359 
5360 /* Return true if RTX code CODE has a single sequence of zero or more
5361    "e" operands and no rtvec operands.  Initialize its rtx_all_subrtx_bounds
5362    entry in that case.  */
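/* For example, PLUS (format "ee") gets start 0 and count 2, whereas codes
   with an rtvec operand such as PARALLEL (format "E") are rejected.  */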
5363 
5364 static bool
5365 setup_reg_subrtx_bounds (unsigned int code)
5366 {
5367   const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
5368   unsigned int i = 0;
5369   for (; format[i] != 'e'; ++i)
5370     {
5371       if (!format[i])
5372 	/* No subrtxes.  Leave start and count as 0.  */
5373 	return true;
5374       if (format[i] == 'E' || format[i] == 'V')
5375 	return false;
5376     }
5377 
5378   /* Record the sequence of 'e's.  */
5379   rtx_all_subrtx_bounds[code].start = i;
5380   do
5381     ++i;
5382   while (format[i] == 'e');
5383   rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
5384   /* rtl-iter.h relies on this.  */
5385   gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);
5386 
5387   for (; format[i]; ++i)
5388     if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
5389       return false;
5390 
5391   return true;
5392 }
5393 
5394 /* Initialize rtx_all_subrtx_bounds.  */
5395 void
5396 init_rtlanal (void)
5397 {
5398   int i;
5399   for (i = 0; i < NUM_RTX_CODE; i++)
5400     {
5401       if (!setup_reg_subrtx_bounds (i))
5402 	rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
5403       if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
5404 	rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
5405     }
5406 
5407   init_num_sign_bit_copies_in_rep ();
5408 }
5409 
5410 /* Check whether this is a constant pool constant.  */
5411 bool
5412 constant_pool_constant_p (rtx x)
5413 {
5414   x = avoid_constant_pool_reference (x);
5415   return CONST_DOUBLE_P (x);
5416 }
5417 
5418 /* If M is a bitmask that selects a field of low-order bits within an item but
5419    not the entire word, return the length of the field.  Return -1 otherwise.
5420    M is used in machine mode MODE.  */
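/* For example, low_bitmask_len (SImode, 0x3f) is 6, whereas a mask with a
   hole in it such as 0x5 yields -1.  */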
5421 
5422 int
5423 low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
5424 {
5425   if (mode != VOIDmode)
5426     {
5427       if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
5428 	return -1;
5429       m &= GET_MODE_MASK (mode);
5430     }
5431 
5432   return exact_log2 (m + 1);
5433 }
5434 
5435 /* Return the mode of MEM's address.  */
5436 
5437 machine_mode
5438 get_address_mode (rtx mem)
5439 {
5440   machine_mode mode;
5441 
5442   gcc_assert (MEM_P (mem));
5443   mode = GET_MODE (XEXP (mem, 0));
5444   if (mode != VOIDmode)
5445     return mode;
5446   return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
5447 }
5448 
5449 /* Split up a CONST_DOUBLE or integer constant rtx
5450    into two rtx's for single words,
5451    storing in *FIRST the word that comes first in memory in the target
5452    and in *SECOND the other.
5453 
5454    TODO: This function needs to be rewritten to work on any size
5455    integer.  */
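/* For example, on a little-endian target with 32-bit words and a 64-bit
   HOST_WIDE_INT, the CONST_INT 0x100000002 splits into *FIRST containing
   (const_int 2) and *SECOND containing (const_int 1).  */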
5456 
5457 void
5458 split_double (rtx value, rtx *first, rtx *second)
5459 {
5460   if (CONST_INT_P (value))
5461     {
5462       if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
5463 	{
5464 	  /* In this case the CONST_INT holds both target words.
5465 	     Extract the bits from it into two word-sized pieces.
5466 	     Sign extend each half to HOST_WIDE_INT.  */
5467 	  unsigned HOST_WIDE_INT low, high;
5468 	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
5469 	  unsigned bits_per_word = BITS_PER_WORD;
5470 
5471 	  /* Set sign_bit to the most significant bit of a word.  */
5472 	  sign_bit = 1;
5473 	  sign_bit <<= bits_per_word - 1;
5474 
5475 	  /* Set mask so that all bits of the word are set.  We could
5476 	     have used 1 << BITS_PER_WORD instead of basing the
5477 	     calculation on sign_bit.  However, on machines where
5478 	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
5479 	     compiler warning, even though the code would never be
5480 	     executed.  */
5481 	  mask = sign_bit << 1;
5482 	  mask--;
5483 
5484 	  /* Set sign_extend as any remaining bits.  */
5485 	  /* Set sign_extend to the remaining higher-order bits.  */
5486 
5487 	  /* Pick the lower word and sign-extend it.  */
5488 	  low = INTVAL (value);
5489 	  low &= mask;
5490 	  if (low & sign_bit)
5491 	    low |= sign_extend;
5492 
5493 	  /* Pick the higher word, shifted to the least significant
5494 	     bits, and sign-extend it.  */
5495 	  high = INTVAL (value);
5496 	  high >>= bits_per_word - 1;
5497 	  high >>= 1;
5498 	  high &= mask;
5499 	  if (high & sign_bit)
5500 	    high |= sign_extend;
5501 
5502 	  /* Store the words in the target machine order.  */
5503 	  if (WORDS_BIG_ENDIAN)
5504 	    {
5505 	      *first = GEN_INT (high);
5506 	      *second = GEN_INT (low);
5507 	    }
5508 	  else
5509 	    {
5510 	      *first = GEN_INT (low);
5511 	      *second = GEN_INT (high);
5512 	    }
5513 	}
5514       else
5515 	{
5516 	  /* The rule for using CONST_INT for a wider mode
5517 	     is that we regard the value as signed.
5518 	     So sign-extend it.  */
5519 	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
5520 	  if (WORDS_BIG_ENDIAN)
5521 	    {
5522 	      *first = high;
5523 	      *second = value;
5524 	    }
5525 	  else
5526 	    {
5527 	      *first = value;
5528 	      *second = high;
5529 	    }
5530 	}
5531     }
5532   else if (GET_CODE (value) == CONST_WIDE_INT)
5533     {
5534       /* All of this is scary code and needs to be converted to
5535 	 properly work with any size integer.  */
5536       gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
5537       if (WORDS_BIG_ENDIAN)
5538 	{
5539 	  *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
5540 	  *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
5541 	}
5542       else
5543 	{
5544 	  *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
5545 	  *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
5546 	}
5547     }
5548   else if (!CONST_DOUBLE_P (value))
5549     {
5550       if (WORDS_BIG_ENDIAN)
5551 	{
5552 	  *first = const0_rtx;
5553 	  *second = value;
5554 	}
5555       else
5556 	{
5557 	  *first = value;
5558 	  *second = const0_rtx;
5559 	}
5560     }
5561   else if (GET_MODE (value) == VOIDmode
5562 	   /* This is the old way we did CONST_DOUBLE integers.  */
5563 	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
5564     {
5565       /* In an integer, the words are defined as most and least significant.
5566 	 So order them by the target's convention.  */
5567       if (WORDS_BIG_ENDIAN)
5568 	{
5569 	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
5570 	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
5571 	}
5572       else
5573 	{
5574 	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
5575 	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
5576 	}
5577     }
5578   else
5579     {
5580       REAL_VALUE_TYPE r;
5581       long l[2];
5582       REAL_VALUE_FROM_CONST_DOUBLE (r, value);
5583 
5584       /* Note, this converts the REAL_VALUE_TYPE to the target's
5585 	 format, splits up the floating point double and outputs
5586 	 exactly 32 bits of it into each of l[0] and l[1] --
5587 	 not necessarily BITS_PER_WORD bits.  */
5588       REAL_VALUE_TO_TARGET_DOUBLE (r, l);
5589 
5590       /* If 32 bits is an entire word for the target, but not for the host,
5591 	 then sign-extend on the host so that the number will look the same
5592 	 way on the host that it would on the target.  See for instance
5593 	 simplify_unary_operation.  The #if is needed to avoid compiler
5594 	 warnings.  */
5595 
5596 #if HOST_BITS_PER_LONG > 32
5597       if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
5598 	{
5599 	  if (l[0] & ((long) 1 << 31))
5600 	    l[0] |= ((long) (-1) << 32);
5601 	  if (l[1] & ((long) 1 << 31))
5602 	    l[1] |= ((long) (-1) << 32);
5603 	}
5604 #endif
5605 
5606       *first = GEN_INT (l[0]);
5607       *second = GEN_INT (l[1]);
5608     }
5609 }
5610 
5611 /* Return true if X is a sign_extract or zero_extract from the least
5612    significant bit.  */
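/* For example, on a !BITS_BIG_ENDIAN target this holds for
   (zero_extract:SI (reg:SI X) (const_int 8) (const_int 0)).  */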
5613 
5614 static bool
5615 lsb_bitfield_op_p (rtx x)
5616 {
5617   if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
5618     {
5619       machine_mode mode = GET_MODE (XEXP (x, 0));
5620       HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
5621       HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
5622 
5623       return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0));
5624     }
5625   return false;
5626 }
5627 
5628 /* Strip outer address "mutations" from LOC and return a pointer to the
5629    inner value.  If OUTER_CODE is nonnull, store the code of the innermost
5630    stripped expression there.
5631 
5632    "Mutations" either convert between modes or apply some kind of
5633    extension, truncation or alignment.  */
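/* For example, given the address (and:SI (plus:SI (reg:SI X) (reg:SI Y))
   (const_int -4)), the alignment AND is stripped and a pointer to the
   inner PLUS is returned, with AND stored in *OUTER_CODE if requested.  */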
5634 
5635 rtx *
5636 strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
5637 {
5638   for (;;)
5639     {
5640       enum rtx_code code = GET_CODE (*loc);
5641       if (GET_RTX_CLASS (code) == RTX_UNARY)
5642 	/* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
5643 	   used to convert between pointer sizes.  */
5644 	loc = &XEXP (*loc, 0);
5645       else if (lsb_bitfield_op_p (*loc))
5646 	/* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
5647 	   acts as a combined truncation and extension.  */
5648 	loc = &XEXP (*loc, 0);
5649       else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
5650 	/* (and ... (const_int -X)) is used to align to X bytes.  */
5651 	loc = &XEXP (*loc, 0);
5652       else if (code == SUBREG
5653                && !OBJECT_P (SUBREG_REG (*loc))
5654                && subreg_lowpart_p (*loc))
5655 	/* A lowpart (subreg (operator ...) ...) inside an address is used
5656 	   for mode conversion too.  */
5657 	loc = &SUBREG_REG (*loc);
5658       else
5659 	return loc;
5660       if (outer_code)
5661 	*outer_code = code;
5662     }
5663 }
5664 
5665 /* Return true if CODE applies some kind of scale.  The scaled value is
5666    the first operand and the scale is the second.  */
5667 
5668 static bool
5669 binary_scale_code_p (enum rtx_code code)
5670 {
5671   return (code == MULT
5672           || code == ASHIFT
5673           /* Needed by ARM targets.  */
5674           || code == ASHIFTRT
5675           || code == LSHIFTRT
5676           || code == ROTATE
5677           || code == ROTATERT);
5678 }
5679 
5680 /* If *INNER can be interpreted as a base, return a pointer to the inner term
5681    (see address_info).  Return null otherwise.  */
5682 
5683 static rtx *
5684 get_base_term (rtx *inner)
5685 {
5686   if (GET_CODE (*inner) == LO_SUM)
5687     inner = strip_address_mutations (&XEXP (*inner, 0));
5688   if (REG_P (*inner)
5689       || MEM_P (*inner)
5690       || GET_CODE (*inner) == SUBREG
5691       || GET_CODE (*inner) == SCRATCH)
5692     return inner;
5693   return 0;
5694 }
5695 
5696 /* If *INNER can be interpreted as an index, return a pointer to the inner term
5697    (see address_info).  Return null otherwise.  */
5698 
5699 static rtx *
5700 get_index_term (rtx *inner)
5701 {
5702   /* At present, only constant scales are allowed.  */
5703   if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
5704     inner = strip_address_mutations (&XEXP (*inner, 0));
5705   if (REG_P (*inner)
5706       || MEM_P (*inner)
5707       || GET_CODE (*inner) == SUBREG
5708       || GET_CODE (*inner) == SCRATCH)
5709     return inner;
5710   return 0;
5711 }
5712 
5713 /* Set the segment part of address INFO to LOC, given that INNER is the
5714    unmutated value.  */
5715 
5716 static void
5717 set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
5718 {
5719   gcc_assert (!info->segment);
5720   info->segment = loc;
5721   info->segment_term = inner;
5722 }
5723 
5724 /* Set the base part of address INFO to LOC, given that INNER is the
5725    unmutated value.  */
5726 
5727 static void
5728 set_address_base (struct address_info *info, rtx *loc, rtx *inner)
5729 {
5730   gcc_assert (!info->base);
5731   info->base = loc;
5732   info->base_term = inner;
5733 }
5734 
5735 /* Set the index part of address INFO to LOC, given that INNER is the
5736    unmutated value.  */
5737 
5738 static void
5739 set_address_index (struct address_info *info, rtx *loc, rtx *inner)
5740 {
5741   gcc_assert (!info->index);
5742   info->index = loc;
5743   info->index_term = inner;
5744 }
5745 
5746 /* Set the displacement part of address INFO to LOC, given that INNER
5747    is the constant term.  */
5748 
5749 static void
5750 set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
5751 {
5752   gcc_assert (!info->disp);
5753   info->disp = loc;
5754   info->disp_term = inner;
5755 }
5756 
5757 /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address.  Set up the
5758    rest of INFO accordingly.  */
5759 
5760 static void
5761 decompose_incdec_address (struct address_info *info)
5762 {
5763   info->autoinc_p = true;
5764 
5765   rtx *base = &XEXP (*info->inner, 0);
5766   set_address_base (info, base, base);
5767   gcc_checking_assert (info->base == info->base_term);
5768 
5769   /* These addresses are only valid when the size of the addressed
5770      value is known.  */
5771   gcc_checking_assert (info->mode != VOIDmode);
5772 }
5773 
5774 /* INFO->INNER describes a {PRE,POST}_MODIFY address.  Set up the rest
5775    of INFO accordingly.  */
5776 
5777 static void
5778 decompose_automod_address (struct address_info *info)
5779 {
5780   info->autoinc_p = true;
5781 
5782   rtx *base = &XEXP (*info->inner, 0);
5783   set_address_base (info, base, base);
5784   gcc_checking_assert (info->base == info->base_term);
5785 
5786   rtx plus = XEXP (*info->inner, 1);
5787   gcc_assert (GET_CODE (plus) == PLUS);
5788 
5789   info->base_term2 = &XEXP (plus, 0);
5790   gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
5791 
5792   rtx *step = &XEXP (plus, 1);
5793   rtx *inner_step = strip_address_mutations (step);
5794   if (CONSTANT_P (*inner_step))
5795     set_address_disp (info, step, inner_step);
5796   else
5797     set_address_index (info, step, inner_step);
5798 }
5799 
5800 /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
5801    values in [PTR, END).  Return a pointer to the end of the used array.  */
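/* For example, (plus (plus (reg X) (reg Y)) (const_int 8)) stores pointers
   to (reg X), (reg Y) and (const_int 8), in that order.  */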
5802 
5803 static rtx **
5804 extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
5805 {
5806   rtx x = *loc;
5807   if (GET_CODE (x) == PLUS)
5808     {
5809       ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
5810       ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
5811     }
5812   else
5813     {
5814       gcc_assert (ptr != end);
5815       *ptr++ = loc;
5816     }
5817   return ptr;
5818 }
5819 
5820 /* Evaluate the likelihood of X being a base or index value, returning
5821    positive if it is likely to be a base, negative if it is likely to be
5822    an index, and 0 if we can't tell.  Make the magnitude of the return
5823    value reflect the amount of confidence we have in the answer.
5824 
5825    MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.  */
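/* For example, a register with REG_POINTER set scores 2, while a hard
   register that is acceptable only as an index register scores -1.  */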
5826 
5827 static int
5828 baseness (rtx x, machine_mode mode, addr_space_t as,
5829 	  enum rtx_code outer_code, enum rtx_code index_code)
5830 {
5831   /* Believe *_POINTER unless the address shape requires otherwise.  */
5832   if (REG_P (x) && REG_POINTER (x))
5833     return 2;
5834   if (MEM_P (x) && MEM_POINTER (x))
5835     return 2;
5836 
5837   if (REG_P (x) && HARD_REGISTER_P (x))
5838     {
5839       /* X is a hard register.  If it only fits one of the base
5840 	 or index classes, choose that interpretation.  */
5841       int regno = REGNO (x);
5842       bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
5843       bool index_p = REGNO_OK_FOR_INDEX_P (regno);
5844       if (base_p != index_p)
5845 	return base_p ? 1 : -1;
5846     }
5847   return 0;
5848 }
5849 
5850 /* INFO->INNER describes a normal, non-automodified address.
5851    Fill in the rest of INFO accordingly.  */
5852 
5853 static void
5854 decompose_normal_address (struct address_info *info)
5855 {
5856   /* Treat the address as the sum of up to four values.  */
5857   rtx *ops[4];
5858   size_t n_ops = extract_plus_operands (info->inner, ops,
5859 					ops + ARRAY_SIZE (ops)) - ops;
5860 
5861   /* If there is more than one component, any base component is in a PLUS.  */
5862   if (n_ops > 1)
5863     info->base_outer_code = PLUS;
5864 
5865   /* Try to classify each sum operand now.  Leave those that could be
5866      either a base or an index in OPS.  */
5867   rtx *inner_ops[4];
5868   size_t out = 0;
5869   for (size_t in = 0; in < n_ops; ++in)
5870     {
5871       rtx *loc = ops[in];
5872       rtx *inner = strip_address_mutations (loc);
5873       if (CONSTANT_P (*inner))
5874 	set_address_disp (info, loc, inner);
5875       else if (GET_CODE (*inner) == UNSPEC)
5876 	set_address_segment (info, loc, inner);
5877       else
5878 	{
5879 	  /* The only other possibilities are a base or an index.  */
5880 	  rtx *base_term = get_base_term (inner);
5881 	  rtx *index_term = get_index_term (inner);
5882 	  gcc_assert (base_term || index_term);
5883 	  if (!base_term)
5884 	    set_address_index (info, loc, index_term);
5885 	  else if (!index_term)
5886 	    set_address_base (info, loc, base_term);
5887 	  else
5888 	    {
5889 	      gcc_assert (base_term == index_term);
5890 	      ops[out] = loc;
5891 	      inner_ops[out] = base_term;
5892 	      ++out;
5893 	    }
5894 	}
5895     }
5896 
5897   /* Classify the remaining OPS members as bases and indexes.  */
5898   if (out == 1)
5899     {
5900       /* If we haven't seen a base or an index yet, assume that this is
5901 	 the base.  If we were confident that another term was the base
5902 	 or index, treat the remaining operand as the other kind.  */
5903       if (!info->base)
5904 	set_address_base (info, ops[0], inner_ops[0]);
5905       else
5906 	set_address_index (info, ops[0], inner_ops[0]);
5907     }
5908   else if (out == 2)
5909     {
5910       /* In the event of a tie, assume the base comes first.  */
5911       if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
5912 		    GET_CODE (*ops[1]))
5913 	  >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
5914 		       GET_CODE (*ops[0])))
5915 	{
5916 	  set_address_base (info, ops[0], inner_ops[0]);
5917 	  set_address_index (info, ops[1], inner_ops[1]);
5918 	}
5919       else
5920 	{
5921 	  set_address_base (info, ops[1], inner_ops[1]);
5922 	  set_address_index (info, ops[0], inner_ops[0]);
5923 	}
5924     }
5925   else
5926     gcc_assert (out == 0);
5927 }
5928 
5929 /* Describe address *LOC in *INFO.  MODE is the mode of the addressed value,
5930    or VOIDmode if not known.  AS is the address space associated with LOC.
5931    OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.  */
5932 
5933 void
5934 decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
5935 		   addr_space_t as, enum rtx_code outer_code)
5936 {
5937   memset (info, 0, sizeof (*info));
5938   info->mode = mode;
5939   info->as = as;
5940   info->addr_outer_code = outer_code;
5941   info->outer = loc;
5942   info->inner = strip_address_mutations (loc, &outer_code);
5943   info->base_outer_code = outer_code;
5944   switch (GET_CODE (*info->inner))
5945     {
5946     case PRE_DEC:
5947     case PRE_INC:
5948     case POST_DEC:
5949     case POST_INC:
5950       decompose_incdec_address (info);
5951       break;
5952 
5953     case PRE_MODIFY:
5954     case POST_MODIFY:
5955       decompose_automod_address (info);
5956       break;
5957 
5958     default:
5959       decompose_normal_address (info);
5960       break;
5961     }
5962 }
5963 
5964 /* Describe address operand LOC in INFO.  */
5965 
5966 void
5967 decompose_lea_address (struct address_info *info, rtx *loc)
5968 {
5969   decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
5970 }
5971 
5972 /* Describe the address of MEM X in INFO.  */
5973 
5974 void
5975 decompose_mem_address (struct address_info *info, rtx x)
5976 {
5977   gcc_assert (MEM_P (x));
5978   decompose_address (info, &XEXP (x, 0), GET_MODE (x),
5979 		     MEM_ADDR_SPACE (x), MEM);
5980 }
5981 
5982 /* Update INFO after a change to the address it describes.  */
5983 
5984 void
5985 update_address (struct address_info *info)
5986 {
5987   decompose_address (info, info->outer, info->mode, info->as,
5988 		     info->addr_outer_code);
5989 }
5990 
5991 /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
5992    more complicated than that.  */
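/* For example, the scale is 4 for both (mult (reg X) (const_int 4)) and
   (ashift (reg X) (const_int 2)), and 1 when the index term is used
   unscaled.  */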
5993 
5994 HOST_WIDE_INT
5995 get_index_scale (const struct address_info *info)
5996 {
5997   rtx index = *info->index;
5998   if (GET_CODE (index) == MULT
5999       && CONST_INT_P (XEXP (index, 1))
6000       && info->index_term == &XEXP (index, 0))
6001     return INTVAL (XEXP (index, 1));
6002 
6003   if (GET_CODE (index) == ASHIFT
6004       && CONST_INT_P (XEXP (index, 1))
6005       && info->index_term == &XEXP (index, 0))
6006     return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1));
6007 
6008   if (info->index == info->index_term)
6009     return 1;
6010 
6011   return 0;
6012 }
6013 
6014 /* Return the "index code" of INFO, in the form required by
6015    ok_for_base_p_1.  */
6016 
6017 enum rtx_code
6018 get_index_code (const struct address_info *info)
6019 {
6020   if (info->index)
6021     return GET_CODE (*info->index);
6022 
6023   if (info->disp)
6024     return GET_CODE (*info->disp);
6025 
6026   return SCRATCH;
6027 }
6028 
6029 /* Return true if X contains a thread-local symbol.  */
6030 
6031 bool
6032 tls_referenced_p (const_rtx x)
6033 {
6034   if (!targetm.have_tls)
6035     return false;
6036 
6037   subrtx_iterator::array_type array;
6038   FOR_EACH_SUBRTX (iter, array, x, ALL)
6039     if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
6040       return true;
6041   return false;
6042 }
6043