xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/rtlanal.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Analyze RTL for GNU compiler.
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "regs.h"
38 #include "function.h"
39 #include "df.h"
40 #include "tree.h"
41 
42 /* Forward declarations */
43 static void set_of_1 (rtx, const_rtx, void *);
44 static bool covers_regno_p (const_rtx, unsigned int);
45 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
46 static int rtx_referenced_p_1 (rtx *, void *);
47 static int computed_jump_p_1 (const_rtx);
48 static void parms_set (rtx, const_rtx, void *);
49 
50 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
51                                                    const_rtx, enum machine_mode,
52                                                    unsigned HOST_WIDE_INT);
53 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
54 					     const_rtx, enum machine_mode,
55                                              unsigned HOST_WIDE_INT);
56 static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
57                                                 enum machine_mode,
58                                                 unsigned int);
59 static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
60                                           enum machine_mode, unsigned int);
61 
62 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
63    -1 if a code has no such operand.  */
64 static int non_rtx_starting_operands[NUM_RTX_CODE];
65 
66 /* Bit flags that specify the machine subtype we are compiling for.
67    Bits are tested using macros TARGET_... defined in the tm.h file
68    and set by `-m...' switches.  Must be defined in rtlanal.c.  */
69 
70 int target_flags;
71 
72 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
73    If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
74    SIGN_EXTEND then while narrowing we also have to enforce the
75    representation and sign-extend the value to mode DESTINATION_REP.
76 
77    If the value is already sign-extended to DESTINATION_REP mode we
78    can just switch to DESTINATION mode on it.  For each pair of
79    integral modes SOURCE and DESTINATION, when truncating from SOURCE
80    to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
81    contains the number of high-order bits in SOURCE that have to be
82    copies of the sign-bit so that we can do this mode-switch to
83    DESTINATION.  */
84 
85 static unsigned int
86 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
87 
88 /* Return 1 if the value of X is unstable
89    (would be different at a different point in the program).
90    The frame pointer, arg pointer, etc. are considered stable
91    (within one function) and so is anything marked `unchanging'.  */
92 
93 int
94 rtx_unstable_p (const_rtx x)
95 {
96   const RTX_CODE code = GET_CODE (x);
97   int i;
98   const char *fmt;
99 
100   switch (code)
101     {
102     case MEM:
103       return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
104 
105     case CONST:
106     case CONST_INT:
107     case CONST_DOUBLE:
108     case CONST_FIXED:
109     case CONST_VECTOR:
110     case SYMBOL_REF:
111     case LABEL_REF:
112       return 0;
113 
114     case REG:
115       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
116       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
117 	  /* The arg pointer varies if it is not a fixed register.  */
118 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
119 	return 0;
120 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
121       /* ??? When call-clobbered, the value is stable modulo the restore
122 	 that must happen after a call.  This currently screws up local-alloc
123 	 into believing that the restore is not needed.  */
124       if (x == pic_offset_table_rtx)
125 	return 0;
126 #endif
127       return 1;
128 
129     case ASM_OPERANDS:
130       if (MEM_VOLATILE_P (x))
131 	return 1;
132 
133       /* Fall through.  */
134 
135     default:
136       break;
137     }
138 
139   fmt = GET_RTX_FORMAT (code);
140   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
141     if (fmt[i] == 'e')
142       {
143 	if (rtx_unstable_p (XEXP (x, i)))
144 	  return 1;
145       }
146     else if (fmt[i] == 'E')
147       {
148 	int j;
149 	for (j = 0; j < XVECLEN (x, i); j++)
150 	  if (rtx_unstable_p (XVECEXP (x, i, j)))
151 	    return 1;
152       }
153 
154   return 0;
155 }
156 
157 /* Return 1 if X has a value that can vary even between two
158    executions of the program.  0 means X can be compared reliably
159    against certain constants or near-constants.
160    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
161    zero, we are slightly more conservative.
162    The frame pointer and the arg pointer are considered constant.  */
163 
164 bool
165 rtx_varies_p (const_rtx x, bool for_alias)
166 {
167   RTX_CODE code;
168   int i;
169   const char *fmt;
170 
171   if (!x)
172     return 0;
173 
174   code = GET_CODE (x);
175   switch (code)
176     {
177     case MEM:
178       return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
179 
180     case CONST:
181     case CONST_INT:
182     case CONST_DOUBLE:
183     case CONST_FIXED:
184     case CONST_VECTOR:
185     case SYMBOL_REF:
186     case LABEL_REF:
187       return 0;
188 
189     case REG:
190       /* Note that we have to test for the actual rtx used for the frame
191 	 and arg pointers and not just the register number in case we have
192 	 eliminated the frame and/or arg pointer and are using it
193 	 for pseudos.  */
194       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
195 	  /* The arg pointer varies if it is not a fixed register.  */
196 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
197 	return 0;
198       if (x == pic_offset_table_rtx
199 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
200 	  /* ??? When call-clobbered, the value is stable modulo the restore
201 	     that must happen after a call.  This currently screws up
202 	     local-alloc into believing that the restore is not needed, so we
203 	     must return 0 only if we are called from alias analysis.  */
204 	  && for_alias
205 #endif
206 	  )
207 	return 0;
208       return 1;
209 
210     case LO_SUM:
211       /* The operand 0 of a LO_SUM is considered constant
212 	 (in fact it is related specifically to operand 1)
213 	 during alias analysis.  */
214       return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
215 	     || rtx_varies_p (XEXP (x, 1), for_alias);
216 
217     case ASM_OPERANDS:
218       if (MEM_VOLATILE_P (x))
219 	return 1;
220 
221       /* Fall through.  */
222 
223     default:
224       break;
225     }
226 
227   fmt = GET_RTX_FORMAT (code);
228   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
229     if (fmt[i] == 'e')
230       {
231 	if (rtx_varies_p (XEXP (x, i), for_alias))
232 	  return 1;
233       }
234     else if (fmt[i] == 'E')
235       {
236 	int j;
237 	for (j = 0; j < XVECLEN (x, i); j++)
238 	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
239 	    return 1;
240       }
241 
242   return 0;
243 }
244 
245 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
246    MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
247    whether nonzero is returned for unaligned memory accesses on strict
248    alignment machines.  */
249 
250 static int
251 rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
252 		       enum machine_mode mode, bool unaligned_mems)
253 {
254   enum rtx_code code = GET_CODE (x);
255 
256   if (STRICT_ALIGNMENT
257       && unaligned_mems
258       && GET_MODE_SIZE (mode) != 0)
259     {
260       HOST_WIDE_INT actual_offset = offset;
261 #ifdef SPARC_STACK_BOUNDARY_HACK
262       /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
263 	     the real alignment of %sp.  However, when it does this, the
264 	     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
265       if (SPARC_STACK_BOUNDARY_HACK
266 	  && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
267 	actual_offset -= STACK_POINTER_OFFSET;
268 #endif
269 
270       if (actual_offset % GET_MODE_SIZE (mode) != 0)
271 	return 1;
272     }
273 
274   switch (code)
275     {
276     case SYMBOL_REF:
277       if (SYMBOL_REF_WEAK (x))
278 	return 1;
279       if (!CONSTANT_POOL_ADDRESS_P (x))
280 	{
281 	  tree decl;
282 	  HOST_WIDE_INT decl_size;
283 
284 	  if (offset < 0)
285 	    return 1;
286 	  if (size == 0)
287 	    size = GET_MODE_SIZE (mode);
288 	  if (size == 0)
289 	    return offset != 0;
290 
291 	  /* If the size of the access or of the symbol is unknown,
292 	     assume the worst.  */
293 	  decl = SYMBOL_REF_DECL (x);
294 
295 	  /* Else check that the access is in bounds.  TODO: restructure
296 	     expr_size/tree_expr_size/int_expr_size and just use the latter.  */
297 	  if (!decl)
298 	    decl_size = -1;
299 	  else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
300 	    decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
301 			 ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
302 			 : -1);
303 	  else if (TREE_CODE (decl) == STRING_CST)
304 	    decl_size = TREE_STRING_LENGTH (decl);
305 	  else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
306 	    decl_size = int_size_in_bytes (TREE_TYPE (decl));
307 	  else
308 	    decl_size = -1;
309 
310 	  return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
311         }
312 
313       return 0;
314 
315     case LABEL_REF:
316       return 0;
317 
318     case REG:
319       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
320       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
321 	  || x == stack_pointer_rtx
322 	  /* The arg pointer varies if it is not a fixed register.  */
323 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
324 	return 0;
325       /* All of the virtual frame registers are stack references.  */
326       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
327 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
328 	return 0;
329       return 1;
330 
331     case CONST:
332       return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
333 				    mode, unaligned_mems);
334 
335     case PLUS:
336       /* An address is assumed not to trap if:
337          - it is the pic register plus a constant.  */
338       if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
339 	return 0;
340 
341       /* - or it is an address that can't trap plus a constant integer,
342 	   with the proper remainder modulo the mode size if we are
343 	   considering unaligned memory references.  */
344       if (CONST_INT_P (XEXP (x, 1))
345 	  && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
346 				     size, mode, unaligned_mems))
347 	return 0;
348 
349       return 1;
350 
351     case LO_SUM:
352     case PRE_MODIFY:
353       return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
354 				    mode, unaligned_mems);
355 
356     case PRE_DEC:
357     case PRE_INC:
358     case POST_DEC:
359     case POST_INC:
360     case POST_MODIFY:
361       return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
362 				    mode, unaligned_mems);
363 
364     default:
365       break;
366     }
367 
368   /* If it isn't one of the cases above, it can cause a trap.  */
369   return 1;
370 }
371 
372 /* Return nonzero if the use of X as an address in a MEM can cause a trap.  */
373 
374 int
375 rtx_addr_can_trap_p (const_rtx x)
376 {
377   return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
378 }
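
/* A minimal usage sketch, illustrative only and not part of the original
   source: an address formed from the frame pointer plus a constant is
   considered safe, while a bare pseudo register is not.

     rtx safe   = plus_constant (frame_pointer_rtx, 8);
     rtx unsafe = gen_reg_rtx (Pmode);

     rtx_addr_can_trap_p (safe)    => 0  (cannot trap)
     rtx_addr_can_trap_p (unsafe)  => 1  (may trap)  */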
379 
380 /* Return true if X is an address that is known to not be zero.  */
381 
382 bool
383 nonzero_address_p (const_rtx x)
384 {
385   const enum rtx_code code = GET_CODE (x);
386 
387   switch (code)
388     {
389     case SYMBOL_REF:
390       return !SYMBOL_REF_WEAK (x);
391 
392     case LABEL_REF:
393       return true;
394 
395     case REG:
396       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
397       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
398 	  || x == stack_pointer_rtx
399 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
400 	return true;
401       /* All of the virtual frame registers are stack references.  */
402       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
403 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
404 	return true;
405       return false;
406 
407     case CONST:
408       return nonzero_address_p (XEXP (x, 0));
409 
410     case PLUS:
411       if (CONST_INT_P (XEXP (x, 1)))
412         return nonzero_address_p (XEXP (x, 0));
413       /* Handle PIC references.  */
414       else if (XEXP (x, 0) == pic_offset_table_rtx
415 	       && CONSTANT_P (XEXP (x, 1)))
416 	return true;
417       return false;
418 
419     case PRE_MODIFY:
420       /* Similar to the above; allow positive offsets.  Further, since
421 	 auto-inc is only allowed in memories, the register must be a
422 	 pointer.  */
423       if (CONST_INT_P (XEXP (x, 1))
424 	  && INTVAL (XEXP (x, 1)) > 0)
425 	return true;
426       return nonzero_address_p (XEXP (x, 0));
427 
428     case PRE_INC:
429       /* Similarly.  Further, the offset is always positive.  */
430       return true;
431 
432     case PRE_DEC:
433     case POST_DEC:
434     case POST_INC:
435     case POST_MODIFY:
436       return nonzero_address_p (XEXP (x, 0));
437 
438     case LO_SUM:
439       return nonzero_address_p (XEXP (x, 1));
440 
441     default:
442       break;
443     }
444 
445   /* If it isn't one of the cases above, it might be zero.  */
446   return false;
447 }
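
/* Illustrative sketch, not part of the original source, assuming the
   symbol below is not declared weak: the address of an ordinary symbol
   is known to be nonzero, while a plain pseudo register is not.

     nonzero_address_p (gen_rtx_SYMBOL_REF (Pmode, "x"))   => true
     nonzero_address_p (gen_reg_rtx (Pmode))               => false  */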
448 
449 /* Return 1 if X refers to a memory location whose address
450    cannot be compared reliably with constant addresses,
451    or if X refers to a BLKmode memory object.
452    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
453    zero, we are slightly more conservative.  */
454 
455 bool
456 rtx_addr_varies_p (const_rtx x, bool for_alias)
457 {
458   enum rtx_code code;
459   int i;
460   const char *fmt;
461 
462   if (x == 0)
463     return 0;
464 
465   code = GET_CODE (x);
466   if (code == MEM)
467     return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
468 
469   fmt = GET_RTX_FORMAT (code);
470   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
471     if (fmt[i] == 'e')
472       {
473 	if (rtx_addr_varies_p (XEXP (x, i), for_alias))
474 	  return 1;
475       }
476     else if (fmt[i] == 'E')
477       {
478 	int j;
479 	for (j = 0; j < XVECLEN (x, i); j++)
480 	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
481 	    return 1;
482       }
483   return 0;
484 }
485 
486 /* Return the value of the integer term in X, if one is apparent;
487    otherwise return 0.
488    Only obvious integer terms are detected.
489    This is used in cse.c with the `related_value' field.  */
490 
491 HOST_WIDE_INT
492 get_integer_term (const_rtx x)
493 {
494   if (GET_CODE (x) == CONST)
495     x = XEXP (x, 0);
496 
497   if (GET_CODE (x) == MINUS
498       && CONST_INT_P (XEXP (x, 1)))
499     return - INTVAL (XEXP (x, 1));
500   if (GET_CODE (x) == PLUS
501       && CONST_INT_P (XEXP (x, 1)))
502     return INTVAL (XEXP (x, 1));
503   return 0;
504 }
505 
506 /* If X is a constant, return the value sans apparent integer term;
507    otherwise return 0.
508    Only obvious integer terms are detected.  */
509 
510 rtx
511 get_related_value (const_rtx x)
512 {
513   if (GET_CODE (x) != CONST)
514     return 0;
515   x = XEXP (x, 0);
516   if (GET_CODE (x) == PLUS
517       && CONST_INT_P (XEXP (x, 1)))
518     return XEXP (x, 0);
519   else if (GET_CODE (x) == MINUS
520 	   && CONST_INT_P (XEXP (x, 1)))
521     return XEXP (x, 0);
522   return 0;
523 }
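
/* Worked example for the two routines above, illustrative only: for

     (const (plus (symbol_ref "x") (const_int 8)))

   get_integer_term returns 8 and get_related_value returns the
   (symbol_ref "x") term; for a MINUS the term is negated, so
   (const (minus (symbol_ref "x") (const_int 4))) yields -4.  */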
524 
525 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
526    to somewhere in the same object or object_block as SYMBOL.  */
527 
528 bool
529 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
530 {
531   tree decl;
532 
533   if (GET_CODE (symbol) != SYMBOL_REF)
534     return false;
535 
536   if (offset == 0)
537     return true;
538 
539   if (offset > 0)
540     {
541       if (CONSTANT_POOL_ADDRESS_P (symbol)
542 	  && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
543 	return true;
544 
545       decl = SYMBOL_REF_DECL (symbol);
546       if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
547 	return true;
548     }
549 
550   if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
551       && SYMBOL_REF_BLOCK (symbol)
552       && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
553       && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
554 	  < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
555     return true;
556 
557   return false;
558 }
559 
560 /* Split X into a base and a constant offset, storing them in *BASE_OUT
561    and *OFFSET_OUT respectively.  */
562 
563 void
564 split_const (rtx x, rtx *base_out, rtx *offset_out)
565 {
566   if (GET_CODE (x) == CONST)
567     {
568       x = XEXP (x, 0);
569       if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
570 	{
571 	  *base_out = XEXP (x, 0);
572 	  *offset_out = XEXP (x, 1);
573 	  return;
574 	}
575     }
576   *base_out = x;
577   *offset_out = const0_rtx;
578 }
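
/* A short usage sketch, illustrative only:

     rtx base, offset;
     split_const (gen_rtx_CONST (Pmode,
				 gen_rtx_PLUS (Pmode,
					       gen_rtx_SYMBOL_REF (Pmode, "s"),
					       GEN_INT (12))),
		  &base, &offset);

   leaves BASE as (symbol_ref "s") and OFFSET as (const_int 12); for any
   other form, BASE is the whole expression and OFFSET is const0_rtx.  */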
579 
580 /* Return the number of places FIND appears within X.  If COUNT_DEST is
581    zero, we do not count occurrences inside the destination of a SET.  */
582 
583 int
584 count_occurrences (const_rtx x, const_rtx find, int count_dest)
585 {
586   int i, j;
587   enum rtx_code code;
588   const char *format_ptr;
589   int count;
590 
591   if (x == find)
592     return 1;
593 
594   code = GET_CODE (x);
595 
596   switch (code)
597     {
598     case REG:
599     case CONST_INT:
600     case CONST_DOUBLE:
601     case CONST_FIXED:
602     case CONST_VECTOR:
603     case SYMBOL_REF:
604     case CODE_LABEL:
605     case PC:
606     case CC0:
607       return 0;
608 
609     case EXPR_LIST:
610       count = count_occurrences (XEXP (x, 0), find, count_dest);
611       if (XEXP (x, 1))
612 	count += count_occurrences (XEXP (x, 1), find, count_dest);
613       return count;
614 
615     case MEM:
616       if (MEM_P (find) && rtx_equal_p (x, find))
617 	return 1;
618       break;
619 
620     case SET:
621       if (SET_DEST (x) == find && ! count_dest)
622 	return count_occurrences (SET_SRC (x), find, count_dest);
623       break;
624 
625     default:
626       break;
627     }
628 
629   format_ptr = GET_RTX_FORMAT (code);
630   count = 0;
631 
632   for (i = 0; i < GET_RTX_LENGTH (code); i++)
633     {
634       switch (*format_ptr++)
635 	{
636 	case 'e':
637 	  count += count_occurrences (XEXP (x, i), find, count_dest);
638 	  break;
639 
640 	case 'E':
641 	  for (j = 0; j < XVECLEN (x, i); j++)
642 	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
643 	  break;
644 	}
645     }
646   return count;
647 }
648 
649 
650 /* Nonzero if register REG appears somewhere within IN.
651    Also works if REG is not a register; in this case it checks
652    for a subexpression of IN that is Lisp "equal" to REG.  */
653 
654 int
655 reg_mentioned_p (const_rtx reg, const_rtx in)
656 {
657   const char *fmt;
658   int i;
659   enum rtx_code code;
660 
661   if (in == 0)
662     return 0;
663 
664   if (reg == in)
665     return 1;
666 
667   if (GET_CODE (in) == LABEL_REF)
668     return reg == XEXP (in, 0);
669 
670   code = GET_CODE (in);
671 
672   switch (code)
673     {
674       /* Compare registers by number.  */
675     case REG:
676       return REG_P (reg) && REGNO (in) == REGNO (reg);
677 
678       /* These codes have no constituent expressions
679 	 and are unique.  */
680     case SCRATCH:
681     case CC0:
682     case PC:
683       return 0;
684 
685     case CONST_INT:
686     case CONST_VECTOR:
687     case CONST_DOUBLE:
688     case CONST_FIXED:
689       /* These are kept unique for a given value.  */
690       return 0;
691 
692     default:
693       break;
694     }
695 
696   if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
697     return 1;
698 
699   fmt = GET_RTX_FORMAT (code);
700 
701   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
702     {
703       if (fmt[i] == 'E')
704 	{
705 	  int j;
706 	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
707 	    if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
708 	      return 1;
709 	}
710       else if (fmt[i] == 'e'
711 	       && reg_mentioned_p (reg, XEXP (in, i)))
712 	return 1;
713     }
714   return 0;
715 }
716 
717 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
718    no CODE_LABEL insn.  */
719 
720 int
721 no_labels_between_p (const_rtx beg, const_rtx end)
722 {
723   rtx p;
724   if (beg == end)
725     return 0;
726   for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
727     if (LABEL_P (p))
728       return 0;
729   return 1;
730 }
731 
732 /* Nonzero if register REG is used in an insn between
733    FROM_INSN and TO_INSN (exclusive of those two).  */
734 
735 int
736 reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
737 {
738   rtx insn;
739 
740   if (from_insn == to_insn)
741     return 0;
742 
743   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
744     if (NONDEBUG_INSN_P (insn)
745 	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
746 	   || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
747       return 1;
748   return 0;
749 }
750 
751 /* Nonzero if the old value of X, a register, is referenced in BODY.  If X
752    is entirely replaced by a new value and the only use is as a SET_DEST,
753    we do not consider it a reference.  */
754 
755 int
756 reg_referenced_p (const_rtx x, const_rtx body)
757 {
758   int i;
759 
760   switch (GET_CODE (body))
761     {
762     case SET:
763       if (reg_overlap_mentioned_p (x, SET_SRC (body)))
764 	return 1;
765 
766       /* If the destination is anything other than CC0, PC, a REG or a SUBREG
767 	 of a REG that occupies all of the REG, the insn references X if
768 	 it is mentioned in the destination.  */
769       if (GET_CODE (SET_DEST (body)) != CC0
770 	  && GET_CODE (SET_DEST (body)) != PC
771 	  && !REG_P (SET_DEST (body))
772 	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
773 		&& REG_P (SUBREG_REG (SET_DEST (body)))
774 		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
775 		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
776 		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
777 			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
778 	  && reg_overlap_mentioned_p (x, SET_DEST (body)))
779 	return 1;
780       return 0;
781 
782     case ASM_OPERANDS:
783       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
784 	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
785 	  return 1;
786       return 0;
787 
788     case CALL:
789     case USE:
790     case IF_THEN_ELSE:
791       return reg_overlap_mentioned_p (x, body);
792 
793     case TRAP_IF:
794       return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
795 
796     case PREFETCH:
797       return reg_overlap_mentioned_p (x, XEXP (body, 0));
798 
799     case UNSPEC:
800     case UNSPEC_VOLATILE:
801       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
802 	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
803 	  return 1;
804       return 0;
805 
806     case PARALLEL:
807       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
808 	if (reg_referenced_p (x, XVECEXP (body, 0, i)))
809 	  return 1;
810       return 0;
811 
812     case CLOBBER:
813       if (MEM_P (XEXP (body, 0)))
814 	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
815 	  return 1;
816       return 0;
817 
818     case COND_EXEC:
819       if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
820 	return 1;
821       return reg_referenced_p (x, COND_EXEC_CODE (body));
822 
823     default:
824       return 0;
825     }
826 }
827 
828 /* Nonzero if register REG is set or clobbered in an insn between
829    FROM_INSN and TO_INSN (exclusive of those two).  */
830 
831 int
832 reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
833 {
834   const_rtx insn;
835 
836   if (from_insn == to_insn)
837     return 0;
838 
839   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
840     if (INSN_P (insn) && reg_set_p (reg, insn))
841       return 1;
842   return 0;
843 }
844 
845 /* Internals of reg_set_between_p: nonzero if REG is set or clobbered in INSN.  */
846 int
847 reg_set_p (const_rtx reg, const_rtx insn)
848 {
849   /* We can be passed an insn or part of one.  If we are passed an insn,
850      check if a side-effect of the insn clobbers REG.  */
851   if (INSN_P (insn)
852       && (FIND_REG_INC_NOTE (insn, reg)
853 	  || (CALL_P (insn)
854 	      && ((REG_P (reg)
855 		   && REGNO (reg) < FIRST_PSEUDO_REGISTER
856 		   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
857 					       GET_MODE (reg), REGNO (reg)))
858 		  || MEM_P (reg)
859 		  || find_reg_fusage (insn, CLOBBER, reg)))))
860     return 1;
861 
862   return set_of (reg, insn) != NULL_RTX;
863 }
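
/* Usage sketch, illustrative only (the register numbers are made up):
   for an insn whose pattern is
     (set (reg:SI 5) (plus:SI (reg:SI 5) (const_int 1)))
   and which is not a CALL_INSN and carries no REG_INC note,

     reg_set_p (gen_rtx_REG (SImode, 5), insn)   => 1
     reg_set_p (gen_rtx_REG (SImode, 6), insn)   => 0  */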
864 
865 /* Similar to reg_set_between_p, but check all registers in X.  Return 0
866    only if none of them are modified between START and END.  Return 1 if
867    X contains a MEM; this routine does use memory aliasing.  */
868 
869 int
870 modified_between_p (const_rtx x, const_rtx start, const_rtx end)
871 {
872   const enum rtx_code code = GET_CODE (x);
873   const char *fmt;
874   int i, j;
875   rtx insn;
876 
877   if (start == end)
878     return 0;
879 
880   switch (code)
881     {
882     case CONST_INT:
883     case CONST_DOUBLE:
884     case CONST_FIXED:
885     case CONST_VECTOR:
886     case CONST:
887     case SYMBOL_REF:
888     case LABEL_REF:
889       return 0;
890 
891     case PC:
892     case CC0:
893       return 1;
894 
895     case MEM:
896       if (modified_between_p (XEXP (x, 0), start, end))
897 	return 1;
898       if (MEM_READONLY_P (x))
899 	return 0;
900       for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
901 	if (memory_modified_in_insn_p (x, insn))
902 	  return 1;
903       return 0;
904       break;
905 
906     case REG:
907       return reg_set_between_p (x, start, end);
908 
909     default:
910       break;
911     }
912 
913   fmt = GET_RTX_FORMAT (code);
914   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
915     {
916       if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
917 	return 1;
918 
919       else if (fmt[i] == 'E')
920 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
921 	  if (modified_between_p (XVECEXP (x, i, j), start, end))
922 	    return 1;
923     }
924 
925   return 0;
926 }
927 
928 /* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
929    of them are modified in INSN.  Return 1 if X contains a MEM; this routine
930    does use memory aliasing.  */
931 
932 int
933 modified_in_p (const_rtx x, const_rtx insn)
934 {
935   const enum rtx_code code = GET_CODE (x);
936   const char *fmt;
937   int i, j;
938 
939   switch (code)
940     {
941     case CONST_INT:
942     case CONST_DOUBLE:
943     case CONST_FIXED:
944     case CONST_VECTOR:
945     case CONST:
946     case SYMBOL_REF:
947     case LABEL_REF:
948       return 0;
949 
950     case PC:
951     case CC0:
952       return 1;
953 
954     case MEM:
955       if (modified_in_p (XEXP (x, 0), insn))
956 	return 1;
957       if (MEM_READONLY_P (x))
958 	return 0;
959       if (memory_modified_in_insn_p (x, insn))
960 	return 1;
961       return 0;
962       break;
963 
964     case REG:
965       return reg_set_p (x, insn);
966 
967     default:
968       break;
969     }
970 
971   fmt = GET_RTX_FORMAT (code);
972   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
973     {
974       if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
975 	return 1;
976 
977       else if (fmt[i] == 'E')
978 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
979 	  if (modified_in_p (XVECEXP (x, i, j), insn))
980 	    return 1;
981     }
982 
983   return 0;
984 }
985 
986 /* Helper function for set_of.  */
987 struct set_of_data
988   {
989     const_rtx found;
990     const_rtx pat;
991   };
992 
993 static void
994 set_of_1 (rtx x, const_rtx pat, void *data1)
995 {
996   struct set_of_data *const data = (struct set_of_data *) (data1);
997   if (rtx_equal_p (x, data->pat)
998       || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
999     data->found = pat;
1000 }
1001 
1002 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1003    (either directly or via STRICT_LOW_PART and similar modifiers).  */
1004 const_rtx
1005 set_of (const_rtx pat, const_rtx insn)
1006 {
1007   struct set_of_data data;
1008   data.found = NULL_RTX;
1009   data.pat = pat;
1010   note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1011   return data.found;
1012 }
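
/* Illustrative sketch (register numbers made up): for an insn whose
   pattern is
     (parallel [(set (reg:SI 0) (reg:SI 1))
		(clobber (reg:SI 2))])
   set_of returns the SET when asked about register 0, the CLOBBER when
   asked about register 2, and NULL_RTX when asked about register 3.  */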
1013 
1014 /* Given an INSN, return a SET expression if this insn has only a single SET.
1015    It may also have CLOBBERs, USEs, or SET whose output
1016    will not be used, which we ignore.  */
1017 
1018 rtx
1019 single_set_2 (const_rtx insn, const_rtx pat)
1020 {
1021   rtx set = NULL;
1022   int set_verified = 1;
1023   int i;
1024 
1025   if (GET_CODE (pat) == PARALLEL)
1026     {
1027       for (i = 0; i < XVECLEN (pat, 0); i++)
1028 	{
1029 	  rtx sub = XVECEXP (pat, 0, i);
1030 	  switch (GET_CODE (sub))
1031 	    {
1032 	    case USE:
1033 	    case CLOBBER:
1034 	      break;
1035 
1036 	    case SET:
1037 	      /* We can consider insns having multiple sets, where all
1038 		 but one are dead, as single-set insns.  In the common case
1039 		 only a single set is present in the pattern, so we want
1040 		 to avoid checking for REG_UNUSED notes unless necessary.
1041 
1042 		 When we reach a set the first time, we just expect it is
1043 		 the single set we are looking for; only when more sets
1044 		 are found in the insn do we check them.  */
1045 	      if (!set_verified)
1046 		{
1047 		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1048 		      && !side_effects_p (set))
1049 		    set = NULL;
1050 		  else
1051 		    set_verified = 1;
1052 		}
1053 	      if (!set)
1054 		set = sub, set_verified = 0;
1055 	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1056 		       || side_effects_p (sub))
1057 		return NULL_RTX;
1058 	      break;
1059 
1060 	    default:
1061 	      return NULL_RTX;
1062 	    }
1063 	}
1064     }
1065   return set;
1066 }
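
/* Callers normally reach this through single_set (defined in rtl.h),
   which handles the plain SET case itself and defers more complex
   patterns here.  A typical use, illustrative only:

     rtx set = single_set (insn);
     if (set && REG_P (SET_DEST (set)))
       ... work with SET_SRC (set) ...  */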
1067 
1068 /* Given an INSN, return nonzero if it has more than one SET, else return
1069    zero.  */
1070 
1071 int
1072 multiple_sets (const_rtx insn)
1073 {
1074   int found;
1075   int i;
1076 
1077   /* INSN must be an insn.  */
1078   if (! INSN_P (insn))
1079     return 0;
1080 
1081   /* Only a PARALLEL can have multiple SETs.  */
1082   if (GET_CODE (PATTERN (insn)) == PARALLEL)
1083     {
1084       for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1085 	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1086 	  {
1087 	    /* If we have already found a SET, then return now.  */
1088 	    if (found)
1089 	      return 1;
1090 	    else
1091 	      found = 1;
1092 	  }
1093     }
1094 
1095   /* Either zero or one SET.  */
1096   return 0;
1097 }
1098 
1099 /* Return nonzero if the destination of SET equals the source
1100    and there are no side effects.  */
1101 
1102 int
1103 set_noop_p (const_rtx set)
1104 {
1105   rtx src = SET_SRC (set);
1106   rtx dst = SET_DEST (set);
1107 
1108   if (dst == pc_rtx && src == pc_rtx)
1109     return 1;
1110 
1111   if (MEM_P (dst) && MEM_P (src))
1112     return rtx_equal_p (dst, src) && !side_effects_p (dst);
1113 
1114   if (GET_CODE (dst) == ZERO_EXTRACT)
1115     return rtx_equal_p (XEXP (dst, 0), src)
1116 	   && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1117 	   && !side_effects_p (src);
1118 
1119   if (GET_CODE (dst) == STRICT_LOW_PART)
1120     dst = XEXP (dst, 0);
1121 
1122   if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1123     {
1124       if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1125 	return 0;
1126       src = SUBREG_REG (src);
1127       dst = SUBREG_REG (dst);
1128     }
1129 
1130   return (REG_P (src) && REG_P (dst)
1131 	  && REGNO (src) == REGNO (dst));
1132 }
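
/* Examples, illustrative only: (set (reg:SI 5) (reg:SI 5)) is a no-op;
   (set (subreg:HI (reg:SI 5) 0) (subreg:HI (reg:SI 5) 0)) is too, since
   the SUBREG_BYTEs match and the inner registers are identical; a MEM
   to MEM copy is a no-op only if the two operands are rtx_equal_p and
   free of side effects.  */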
1133 
1134 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1135    value to itself.  */
1136 
1137 int
1138 noop_move_p (const_rtx insn)
1139 {
1140   rtx pat = PATTERN (insn);
1141 
1142   if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1143     return 1;
1144 
1145   /* Insns carrying these notes are useful later on.  */
1146   if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1147     return 0;
1148 
1149   if (GET_CODE (pat) == SET && set_noop_p (pat))
1150     return 1;
1151 
1152   if (GET_CODE (pat) == PARALLEL)
1153     {
1154       int i;
1155       /* If nothing but SETs of registers to themselves,
1156 	 this insn can also be deleted.  */
1157       for (i = 0; i < XVECLEN (pat, 0); i++)
1158 	{
1159 	  rtx tem = XVECEXP (pat, 0, i);
1160 
1161 	  if (GET_CODE (tem) == USE
1162 	      || GET_CODE (tem) == CLOBBER)
1163 	    continue;
1164 
1165 	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1166 	    return 0;
1167 	}
1168 
1169       return 1;
1170     }
1171   return 0;
1172 }
1173 
1174 
1175 /* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
1176    is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1177    If the object was modified, if we hit a partial assignment to X, or hit a
1178    CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
1179    point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
1180    be the src.  */
1181 
1182 rtx
1183 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1184 {
1185   rtx p;
1186 
1187   for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1188        p = PREV_INSN (p))
1189     if (INSN_P (p))
1190       {
1191 	rtx set = single_set (p);
1192 	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1193 
1194 	if (set && rtx_equal_p (x, SET_DEST (set)))
1195 	  {
1196 	    rtx src = SET_SRC (set);
1197 
1198 	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1199 	      src = XEXP (note, 0);
1200 
1201 	    if ((valid_to == NULL_RTX
1202 		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1203 		/* Reject hard registers because we don't usually want
1204 		   to use them; we'd rather use a pseudo.  */
1205 		&& (! (REG_P (src)
1206 		      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1207 	      {
1208 		*pinsn = p;
1209 		return src;
1210 	      }
1211 	  }
1212 
1213 	/* If set in non-simple way, we don't have a value.  */
1214 	if (reg_set_p (x, p))
1215 	  break;
1216       }
1217 
1218   return x;
1219 }
1220 
1221 /* Return nonzero if register in range [REGNO, ENDREGNO)
1222    appears either explicitly or implicitly in X
1223    other than being stored into.
1224 
1225    References contained within the substructure at LOC do not count.
1226    LOC may be zero, meaning don't ignore anything.  */
1227 
1228 int
1229 refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1230 		   rtx *loc)
1231 {
1232   int i;
1233   unsigned int x_regno;
1234   RTX_CODE code;
1235   const char *fmt;
1236 
1237  repeat:
1238   /* The contents of a REG_NONNEG note are always zero, so we must come here
1239      upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
1240   if (x == 0)
1241     return 0;
1242 
1243   code = GET_CODE (x);
1244 
1245   switch (code)
1246     {
1247     case REG:
1248       x_regno = REGNO (x);
1249 
1250       /* If we are modifying the stack, frame, or argument pointer, it will
1251 	 clobber a virtual register.  In fact, we could be more precise,
1252 	 but it isn't worth it.  */
1253       if ((x_regno == STACK_POINTER_REGNUM
1254 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1255 	   || x_regno == ARG_POINTER_REGNUM
1256 #endif
1257 	   || x_regno == FRAME_POINTER_REGNUM)
1258 	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1259 	return 1;
1260 
1261       return endregno > x_regno && regno < END_REGNO (x);
1262 
1263     case SUBREG:
1264       /* If this is a SUBREG of a hard reg, we can see exactly which
1265 	 registers are being modified.  Otherwise, handle normally.  */
1266       if (REG_P (SUBREG_REG (x))
1267 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1268 	{
1269 	  unsigned int inner_regno = subreg_regno (x);
1270 	  unsigned int inner_endregno
1271 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1272 			     ? subreg_nregs (x) : 1);
1273 
1274 	  return endregno > inner_regno && regno < inner_endregno;
1275 	}
1276       break;
1277 
1278     case CLOBBER:
1279     case SET:
1280       if (&SET_DEST (x) != loc
1281 	  /* Note setting a SUBREG counts as referring to the REG it is in for
1282 	     a pseudo but not for hard registers since we can
1283 	     treat each word individually.  */
1284 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
1285 	       && loc != &SUBREG_REG (SET_DEST (x))
1286 	       && REG_P (SUBREG_REG (SET_DEST (x)))
1287 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1288 	       && refers_to_regno_p (regno, endregno,
1289 				     SUBREG_REG (SET_DEST (x)), loc))
1290 	      || (!REG_P (SET_DEST (x))
1291 		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1292 	return 1;
1293 
1294       if (code == CLOBBER || loc == &SET_SRC (x))
1295 	return 0;
1296       x = SET_SRC (x);
1297       goto repeat;
1298 
1299     default:
1300       break;
1301     }
1302 
1303   /* X does not match, so try its subexpressions.  */
1304 
1305   fmt = GET_RTX_FORMAT (code);
1306   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1307     {
1308       if (fmt[i] == 'e' && loc != &XEXP (x, i))
1309 	{
1310 	  if (i == 0)
1311 	    {
1312 	      x = XEXP (x, 0);
1313 	      goto repeat;
1314 	    }
1315 	  else
1316 	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1317 	      return 1;
1318 	}
1319       else if (fmt[i] == 'E')
1320 	{
1321 	  int j;
1322 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1323 	    if (loc != &XVECEXP (x, i, j)
1324 		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1325 	      return 1;
1326 	}
1327     }
1328   return 0;
1329 }
1330 
1331 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
1332    we check if any register number in X conflicts with the relevant register
1333    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
1334    contains a MEM (we don't bother checking for memory addresses that can't
1335    conflict because we expect this to be a rare case).  */
1336 
1337 int
1338 reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1339 {
1340   unsigned int regno, endregno;
1341 
1342   /* If either argument is a constant, then modifying X cannot
1343      affect IN.  Here we look at IN; we can profitably combine
1344      CONSTANT_P (x) with the switch statement below.  */
1345   if (CONSTANT_P (in))
1346     return 0;
1347 
1348  recurse:
1349   switch (GET_CODE (x))
1350     {
1351     case STRICT_LOW_PART:
1352     case ZERO_EXTRACT:
1353     case SIGN_EXTRACT:
1354       /* Overly conservative.  */
1355       x = XEXP (x, 0);
1356       goto recurse;
1357 
1358     case SUBREG:
1359       regno = REGNO (SUBREG_REG (x));
1360       if (regno < FIRST_PSEUDO_REGISTER)
1361 	regno = subreg_regno (x);
1362       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1363 			  ? subreg_nregs (x) : 1);
1364       goto do_reg;
1365 
1366     case REG:
1367       regno = REGNO (x);
1368       endregno = END_REGNO (x);
1369     do_reg:
1370       return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1371 
1372     case MEM:
1373       {
1374 	const char *fmt;
1375 	int i;
1376 
1377 	if (MEM_P (in))
1378 	  return 1;
1379 
1380 	fmt = GET_RTX_FORMAT (GET_CODE (in));
1381 	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1382 	  if (fmt[i] == 'e')
1383 	    {
1384 	      if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1385 		return 1;
1386 	    }
1387 	  else if (fmt[i] == 'E')
1388 	    {
1389 	      int j;
1390 	      for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1391 		if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1392 		  return 1;
1393 	    }
1394 
1395 	return 0;
1396       }
1397 
1398     case SCRATCH:
1399     case PC:
1400     case CC0:
1401       return reg_mentioned_p (x, in);
1402 
1403     case PARALLEL:
1404       {
1405 	int i;
1406 
1407 	/* If any register in here refers to it we return true.  */
1408 	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1409 	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
1410 	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1411 	    return 1;
1412 	return 0;
1413       }
1414 
1415     default:
1416       gcc_assert (CONSTANT_P (x));
1417       return 0;
1418     }
1419 }
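
/* Illustrative only (register numbers made up, assuming a target where
   DImode needs two word-sized hard registers): (reg:DI 0) occupies hard
   registers 0 and 1, so reg_overlap_mentioned_p returns 1 when X is
   (reg:DI 0) and IN is (reg:SI 1), even though reg_mentioned_p would
   not find an identical subexpression there.  */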
1420 
1421 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1422    (X would be the pattern of an insn).  DATA is an arbitrary pointer,
1423    ignored by note_stores, but passed to FUN.
1424 
1425    FUN receives three arguments:
1426    1. the REG, MEM, CC0 or PC being stored in or clobbered,
1427    2. the SET or CLOBBER rtx that does the store,
1428    3. the pointer DATA provided to note_stores.
1429 
1430   If the item being stored in or clobbered is a SUBREG of a hard register,
1431   the SUBREG will be passed.  */
1432 
1433 void
1434 note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1435 {
1436   int i;
1437 
1438   if (GET_CODE (x) == COND_EXEC)
1439     x = COND_EXEC_CODE (x);
1440 
1441   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1442     {
1443       rtx dest = SET_DEST (x);
1444 
1445       while ((GET_CODE (dest) == SUBREG
1446 	      && (!REG_P (SUBREG_REG (dest))
1447 		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1448 	     || GET_CODE (dest) == ZERO_EXTRACT
1449 	     || GET_CODE (dest) == STRICT_LOW_PART)
1450 	dest = XEXP (dest, 0);
1451 
1452       /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1453 	 each of whose first operand is a register.  */
1454       if (GET_CODE (dest) == PARALLEL)
1455 	{
1456 	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1457 	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1458 	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1459 	}
1460       else
1461 	(*fun) (dest, x, data);
1462     }
1463 
1464   else if (GET_CODE (x) == PARALLEL)
1465     for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1466       note_stores (XVECEXP (x, 0, i), fun, data);
1467 }
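
/* A minimal callback sketch, illustrative only; the helper name is made
   up and the example only records stores to hard registers:

     static void
     note_hard_reg_store (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
			  void *data)
     {
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
	 SET_HARD_REG_BIT (*(HARD_REG_SET *) data, REGNO (dest));
     }

     HARD_REG_SET stored;
     CLEAR_HARD_REG_SET (stored);
     note_stores (PATTERN (insn), note_hard_reg_store, &stored);  */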
1468 
1469 /* Like note_stores, but call FUN for each expression that is being
1470    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
1471    FUN for each expression, not any interior subexpressions.  FUN receives a
1472    pointer to the expression and the DATA passed to this function.
1473 
1474    Note that this is not quite the same test as that done in reg_referenced_p
1475    since that considers something as being referenced if it is being
1476    partially set, while we do not.  */
1477 
1478 void
1479 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1480 {
1481   rtx body = *pbody;
1482   int i;
1483 
1484   switch (GET_CODE (body))
1485     {
1486     case COND_EXEC:
1487       (*fun) (&COND_EXEC_TEST (body), data);
1488       note_uses (&COND_EXEC_CODE (body), fun, data);
1489       return;
1490 
1491     case PARALLEL:
1492       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1493 	note_uses (&XVECEXP (body, 0, i), fun, data);
1494       return;
1495 
1496     case SEQUENCE:
1497       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1498 	note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1499       return;
1500 
1501     case USE:
1502       (*fun) (&XEXP (body, 0), data);
1503       return;
1504 
1505     case ASM_OPERANDS:
1506       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1507 	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1508       return;
1509 
1510     case TRAP_IF:
1511       (*fun) (&TRAP_CONDITION (body), data);
1512       return;
1513 
1514     case PREFETCH:
1515       (*fun) (&XEXP (body, 0), data);
1516       return;
1517 
1518     case UNSPEC:
1519     case UNSPEC_VOLATILE:
1520       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1521 	(*fun) (&XVECEXP (body, 0, i), data);
1522       return;
1523 
1524     case CLOBBER:
1525       if (MEM_P (XEXP (body, 0)))
1526 	(*fun) (&XEXP (XEXP (body, 0), 0), data);
1527       return;
1528 
1529     case SET:
1530       {
1531 	rtx dest = SET_DEST (body);
1532 
1533 	/* For a SET we call FUN on everything in the source, plus registers in
1534 	   a memory destination and the operands of a ZERO_EXTRACT.  */
1535 	(*fun) (&SET_SRC (body), data);
1536 
1537 	if (GET_CODE (dest) == ZERO_EXTRACT)
1538 	  {
1539 	    (*fun) (&XEXP (dest, 1), data);
1540 	    (*fun) (&XEXP (dest, 2), data);
1541 	  }
1542 
1543 	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1544 	  dest = XEXP (dest, 0);
1545 
1546 	if (MEM_P (dest))
1547 	  (*fun) (&XEXP (dest, 0), data);
1548       }
1549       return;
1550 
1551     default:
1552       /* All the other possibilities never store.  */
1553       (*fun) (pbody, data);
1554       return;
1555     }
1556 }
1557 
1558 /* Return nonzero if X's old contents don't survive after INSN.
1559    This will be true if X is (cc0) or if X is a register and
1560    X dies in INSN or because INSN entirely sets X.
1561 
1562    "Entirely set" means set directly and not through a SUBREG, or
1563    ZERO_EXTRACT, so no trace of the old contents remains.
1564    Likewise, REG_INC does not count.
1565 
1566    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
1567    but for this use that makes no difference, since regs don't overlap
1568    during their lifetimes.  Therefore, this function may be used
1569    at any time after deaths have been computed.
1570 
1571    If REG is a hard reg that occupies multiple machine registers, this
1572    function will only return 1 if each of those registers will be replaced
1573    by INSN.  */
1574 
1575 int
1576 dead_or_set_p (const_rtx insn, const_rtx x)
1577 {
1578   unsigned int regno, end_regno;
1579   unsigned int i;
1580 
1581   /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
1582   if (GET_CODE (x) == CC0)
1583     return 1;
1584 
1585   gcc_assert (REG_P (x));
1586 
1587   regno = REGNO (x);
1588   end_regno = END_REGNO (x);
1589   for (i = regno; i < end_regno; i++)
1590     if (! dead_or_set_regno_p (insn, i))
1591       return 0;
1592 
1593   return 1;
1594 }
1595 
1596 /* Return TRUE iff DEST is a register or subreg of a register and
1597    doesn't change the number of words of the inner register, and any
1598    part of the register is TEST_REGNO.  */
1599 
1600 static bool
1601 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1602 {
1603   unsigned int regno, endregno;
1604 
1605   if (GET_CODE (dest) == SUBREG
1606       && (((GET_MODE_SIZE (GET_MODE (dest))
1607 	    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1608 	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1609 	       + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1610     dest = SUBREG_REG (dest);
1611 
1612   if (!REG_P (dest))
1613     return false;
1614 
1615   regno = REGNO (dest);
1616   endregno = END_REGNO (dest);
1617   return (test_regno >= regno && test_regno < endregno);
1618 }
1619 
1620 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1621    any member matches the covers_regno_no_parallel_p criteria.  */
1622 
1623 static bool
1624 covers_regno_p (const_rtx dest, unsigned int test_regno)
1625 {
1626   if (GET_CODE (dest) == PARALLEL)
1627     {
1628       /* Some targets place small structures in registers for return
1629 	 values of functions, and those registers are wrapped in
1630 	 PARALLELs that we may see as the destination of a SET.  */
1631       int i;
1632 
1633       for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1634 	{
1635 	  rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1636 	  if (inner != NULL_RTX
1637 	      && covers_regno_no_parallel_p (inner, test_regno))
1638 	    return true;
1639 	}
1640 
1641       return false;
1642     }
1643   else
1644     return covers_regno_no_parallel_p (dest, test_regno);
1645 }
1646 
1647 /* Utility function for dead_or_set_p to check an individual register. */
1648 
1649 int
1650 dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1651 {
1652   const_rtx pattern;
1653 
1654   /* See if there is a death note for something that includes TEST_REGNO.  */
1655   if (find_regno_note (insn, REG_DEAD, test_regno))
1656     return 1;
1657 
1658   if (CALL_P (insn)
1659       && find_regno_fusage (insn, CLOBBER, test_regno))
1660     return 1;
1661 
1662   pattern = PATTERN (insn);
1663 
1664   if (GET_CODE (pattern) == COND_EXEC)
1665     pattern = COND_EXEC_CODE (pattern);
1666 
1667   if (GET_CODE (pattern) == SET)
1668     return covers_regno_p (SET_DEST (pattern), test_regno);
1669   else if (GET_CODE (pattern) == PARALLEL)
1670     {
1671       int i;
1672 
1673       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1674 	{
1675 	  rtx body = XVECEXP (pattern, 0, i);
1676 
1677 	  if (GET_CODE (body) == COND_EXEC)
1678 	    body = COND_EXEC_CODE (body);
1679 
1680 	  if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1681 	      && covers_regno_p (SET_DEST (body), test_regno))
1682 	    return 1;
1683 	}
1684     }
1685 
1686   return 0;
1687 }
1688 
1689 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1690    If DATUM is nonzero, look for one whose datum is DATUM.  */
1691 
1692 rtx
1693 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1694 {
1695   rtx link;
1696 
1697   gcc_assert (insn);
1698 
1699   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1700   if (! INSN_P (insn))
1701     return 0;
1702   if (datum == 0)
1703     {
1704       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1705 	if (REG_NOTE_KIND (link) == kind)
1706 	  return link;
1707       return 0;
1708     }
1709 
1710   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1711     if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1712       return link;
1713   return 0;
1714 }
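
/* Typical usage, illustrative only: fetch the value recorded by a
   REG_EQUAL note, if the insn has one.

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       {
	 rtx value = XEXP (note, 0);
	 ...
       }  */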
1715 
1716 /* Return the reg-note of kind KIND in insn INSN which applies to register
1717    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
1718    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1719    it might be the case that the note overlaps REGNO.  */
1720 
1721 rtx
1722 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1723 {
1724   rtx link;
1725 
1726   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
1727   if (! INSN_P (insn))
1728     return 0;
1729 
1730   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1731     if (REG_NOTE_KIND (link) == kind
1732 	/* Verify that it is a register, so that scratch and MEM won't cause a
1733 	   problem here.  */
1734 	&& REG_P (XEXP (link, 0))
1735 	&& REGNO (XEXP (link, 0)) <= regno
1736 	&& END_REGNO (XEXP (link, 0)) > regno)
1737       return link;
1738   return 0;
1739 }
1740 
1741 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1742    has such a note.  */
1743 
1744 rtx
1745 find_reg_equal_equiv_note (const_rtx insn)
1746 {
1747   rtx link;
1748 
1749   if (!INSN_P (insn))
1750     return 0;
1751 
1752   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1753     if (REG_NOTE_KIND (link) == REG_EQUAL
1754 	|| REG_NOTE_KIND (link) == REG_EQUIV)
1755       {
1756 	/* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1757 	   insns that have multiple sets.  Checking single_set to
1758 	   make sure of this is not the proper check, as explained
1759 	   in the comment in set_unique_reg_note.
1760 
1761 	   This should be changed into an assert.  */
1762 	if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1763 	  return 0;
1764 	return link;
1765       }
1766   return NULL;
1767 }
1768 
1769 /* Check whether INSN is a single_set whose source is known to be
1770    equivalent to a constant.  Return that constant if so, otherwise
1771    return null.  */
1772 
1773 rtx
1774 find_constant_src (const_rtx insn)
1775 {
1776   rtx note, set, x;
1777 
1778   set = single_set (insn);
1779   if (set)
1780     {
1781       x = avoid_constant_pool_reference (SET_SRC (set));
1782       if (CONSTANT_P (x))
1783 	return x;
1784     }
1785 
1786   note = find_reg_equal_equiv_note (insn);
1787   if (note && CONSTANT_P (XEXP (note, 0)))
1788     return XEXP (note, 0);
1789 
1790   return NULL_RTX;
1791 }
1792 
1793 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1794    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1795 
1796 int
1797 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1798 {
1799   /* If it's not a CALL_INSN, it can't possibly have a
1800      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
1801   if (!CALL_P (insn))
1802     return 0;
1803 
1804   gcc_assert (datum);
1805 
1806   if (!REG_P (datum))
1807     {
1808       rtx link;
1809 
1810       for (link = CALL_INSN_FUNCTION_USAGE (insn);
1811 	   link;
1812 	   link = XEXP (link, 1))
1813 	if (GET_CODE (XEXP (link, 0)) == code
1814 	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1815 	  return 1;
1816     }
1817   else
1818     {
1819       unsigned int regno = REGNO (datum);
1820 
1821       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1822 	 to pseudo registers, so don't bother checking.  */
1823 
1824       if (regno < FIRST_PSEUDO_REGISTER)
1825 	{
1826 	  unsigned int end_regno = END_HARD_REGNO (datum);
1827 	  unsigned int i;
1828 
1829 	  for (i = regno; i < end_regno; i++)
1830 	    if (find_regno_fusage (insn, code, i))
1831 	      return 1;
1832 	}
1833     }
1834 
1835   return 0;
1836 }
1837 
1838 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1839    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
1840 
1841 int
1842 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1843 {
1844   rtx link;
1845 
1846   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1847      to pseudo registers, so don't bother checking.  */
1848 
1849   if (regno >= FIRST_PSEUDO_REGISTER
1850       || !CALL_P (insn) )
1851     return 0;
1852 
1853   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1854     {
1855       rtx op, reg;
1856 
1857       if (GET_CODE (op = XEXP (link, 0)) == code
1858 	  && REG_P (reg = XEXP (op, 0))
1859 	  && REGNO (reg) <= regno
1860 	  && END_HARD_REGNO (reg) > regno)
1861 	return 1;
1862     }
1863 
1864   return 0;
1865 }
1866 
1867 
1868 /* Allocate a register note with kind KIND and datum DATUM.  LIST is
1869    stored as the pointer to the next register note.  */
1870 
1871 rtx
1872 alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
1873 {
1874   rtx note;
1875 
1876   switch (kind)
1877     {
1878     case REG_CC_SETTER:
1879     case REG_CC_USER:
1880     case REG_LABEL_TARGET:
1881     case REG_LABEL_OPERAND:
1882       /* These types of register notes use an INSN_LIST rather than an
1883 	 EXPR_LIST, so that copying is done right and dumps look
1884 	 better.  */
1885       note = alloc_INSN_LIST (datum, list);
1886       PUT_REG_NOTE_KIND (note, kind);
1887       break;
1888 
1889     default:
1890       note = alloc_EXPR_LIST (kind, datum, list);
1891       break;
1892     }
1893 
1894   return note;
1895 }
1896 
1897 /* Add register note with kind KIND and datum DATUM to INSN.  */
1898 
1899 void
1900 add_reg_note (rtx insn, enum reg_note kind, rtx datum)
1901 {
1902   REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
1903 }
1904 
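/* For example, a pass that has just emitted an insn computing a value it
   knows to be a constant might record that fact with add_reg_note (INSN
   and the value 42 are purely illustrative):

       add_reg_note (insn, REG_EQUAL, GEN_INT (42));

   alloc_reg_note chooses between INSN_LIST and EXPR_LIST itself, so
   callers never need to care about the list flavor.  */
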
1905 /* Remove register note NOTE from the REG_NOTES of INSN.  */
1906 
1907 void
1908 remove_note (rtx insn, const_rtx note)
1909 {
1910   rtx link;
1911 
1912   if (note == NULL_RTX)
1913     return;
1914 
1915   if (REG_NOTES (insn) == note)
1916     REG_NOTES (insn) = XEXP (note, 1);
1917   else
1918     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1919       if (XEXP (link, 1) == note)
1920 	{
1921 	  XEXP (link, 1) = XEXP (note, 1);
1922 	  break;
1923 	}
1924 
1925   switch (REG_NOTE_KIND (note))
1926     {
1927     case REG_EQUAL:
1928     case REG_EQUIV:
1929       df_notes_rescan (insn);
1930       break;
1931     default:
1932       break;
1933     }
1934 }
1935 
1936 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */
1937 
1938 void
1939 remove_reg_equal_equiv_notes (rtx insn)
1940 {
1941   rtx *loc;
1942 
1943   loc = &REG_NOTES (insn);
1944   while (*loc)
1945     {
1946       enum reg_note kind = REG_NOTE_KIND (*loc);
1947       if (kind == REG_EQUAL || kind == REG_EQUIV)
1948 	*loc = XEXP (*loc, 1);
1949       else
1950 	loc = &XEXP (*loc, 1);
1951     }
1952 }
1953 
1954 /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.  */
1955 
1956 void
1957 remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
1958 {
1959   df_ref eq_use;
1960 
1961   if (!df)
1962     return;
1963 
1964   /* This loop is a little tricky.  We cannot just go down the chain because
1965      it is being modified by the actions in the loop.  So we simply iterate
1966      over the head.  We plan to drain the list anyway.  */
1967   while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
1968     {
1969       rtx insn = DF_REF_INSN (eq_use);
1970       rtx note = find_reg_equal_equiv_note (insn);
1971 
1972       /* This assert is generally triggered when someone deletes a REG_EQUAL
1973 	 or REG_EQUIV note by hacking the list manually rather than calling
1974 	 remove_note.  */
1975       gcc_assert (note);
1976 
1977       remove_note (insn, note);
1978     }
1979 }
1980 
1981 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1982    return 1 if it is found.  A simple equality test is used to determine if
1983    NODE matches.  */
1984 
1985 int
1986 in_expr_list_p (const_rtx listp, const_rtx node)
1987 {
1988   const_rtx x;
1989 
1990   for (x = listp; x; x = XEXP (x, 1))
1991     if (node == XEXP (x, 0))
1992       return 1;
1993 
1994   return 0;
1995 }
1996 
1997 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1998    remove that entry from the list if it is found.
1999 
2000    A simple equality test is used to determine if NODE matches.  */
2001 
2002 void
2003 remove_node_from_expr_list (const_rtx node, rtx *listp)
2004 {
2005   rtx temp = *listp;
2006   rtx prev = NULL_RTX;
2007 
2008   while (temp)
2009     {
2010       if (node == XEXP (temp, 0))
2011 	{
2012 	  /* Splice the node out of the list.  */
2013 	  if (prev)
2014 	    XEXP (prev, 1) = XEXP (temp, 1);
2015 	  else
2016 	    *listp = XEXP (temp, 1);
2017 
2018 	  return;
2019 	}
2020 
2021       prev = temp;
2022       temp = XEXP (temp, 1);
2023     }
2024 }
2025 
2026 /* Nonzero if X contains any volatile instructions.  These are instructions
2027    which may cause unpredictable machine state, and thus no instructions
2028    should be moved or combined across them.  This includes only volatile
2029    asms and UNSPEC_VOLATILE instructions.  */
2030 
2031 int
2032 volatile_insn_p (const_rtx x)
2033 {
2034   const RTX_CODE code = GET_CODE (x);
2035   switch (code)
2036     {
2037     case LABEL_REF:
2038     case SYMBOL_REF:
2039     case CONST_INT:
2040     case CONST:
2041     case CONST_DOUBLE:
2042     case CONST_FIXED:
2043     case CONST_VECTOR:
2044     case CC0:
2045     case PC:
2046     case REG:
2047     case SCRATCH:
2048     case CLOBBER:
2049     case ADDR_VEC:
2050     case ADDR_DIFF_VEC:
2051     case CALL:
2052     case MEM:
2053       return 0;
2054 
2055     case UNSPEC_VOLATILE:
2056  /* case TRAP_IF: This isn't clear yet.  */
2057       return 1;
2058 
2059     case ASM_INPUT:
2060     case ASM_OPERANDS:
2061       if (MEM_VOLATILE_P (x))
2062 	return 1;
2063 
2064     default:
2065       break;
2066     }
2067 
2068   /* Recursively scan the operands of this expression.  */
2069 
2070   {
2071     const char *const fmt = GET_RTX_FORMAT (code);
2072     int i;
2073 
2074     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2075       {
2076 	if (fmt[i] == 'e')
2077 	  {
2078 	    if (volatile_insn_p (XEXP (x, i)))
2079 	      return 1;
2080 	  }
2081 	else if (fmt[i] == 'E')
2082 	  {
2083 	    int j;
2084 	    for (j = 0; j < XVECLEN (x, i); j++)
2085 	      if (volatile_insn_p (XVECEXP (x, i, j)))
2086 		return 1;
2087 	  }
2088       }
2089   }
2090   return 0;
2091 }
2092 
2093 /* Nonzero if X contains any volatile memory references,
2094    UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
2095 
2096 int
2097 volatile_refs_p (const_rtx x)
2098 {
2099   const RTX_CODE code = GET_CODE (x);
2100   switch (code)
2101     {
2102     case LABEL_REF:
2103     case SYMBOL_REF:
2104     case CONST_INT:
2105     case CONST:
2106     case CONST_DOUBLE:
2107     case CONST_FIXED:
2108     case CONST_VECTOR:
2109     case CC0:
2110     case PC:
2111     case REG:
2112     case SCRATCH:
2113     case CLOBBER:
2114     case ADDR_VEC:
2115     case ADDR_DIFF_VEC:
2116       return 0;
2117 
2118     case UNSPEC_VOLATILE:
2119       return 1;
2120 
2121     case MEM:
2122     case ASM_INPUT:
2123     case ASM_OPERANDS:
2124       if (MEM_VOLATILE_P (x))
2125 	return 1;
2126 
2127     default:
2128       break;
2129     }
2130 
2131   /* Recursively scan the operands of this expression.  */
2132 
2133   {
2134     const char *const fmt = GET_RTX_FORMAT (code);
2135     int i;
2136 
2137     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2138       {
2139 	if (fmt[i] == 'e')
2140 	  {
2141 	    if (volatile_refs_p (XEXP (x, i)))
2142 	      return 1;
2143 	  }
2144 	else if (fmt[i] == 'E')
2145 	  {
2146 	    int j;
2147 	    for (j = 0; j < XVECLEN (x, i); j++)
2148 	      if (volatile_refs_p (XVECEXP (x, i, j)))
2149 		return 1;
2150 	  }
2151       }
2152   }
2153   return 0;
2154 }
2155 
2156 /* Similar to above, except that it also rejects register pre- and post-
2157    incrementing.  */
2158 
2159 int
2160 side_effects_p (const_rtx x)
2161 {
2162   const RTX_CODE code = GET_CODE (x);
2163   switch (code)
2164     {
2165     case LABEL_REF:
2166     case SYMBOL_REF:
2167     case CONST_INT:
2168     case CONST:
2169     case CONST_DOUBLE:
2170     case CONST_FIXED:
2171     case CONST_VECTOR:
2172     case CC0:
2173     case PC:
2174     case REG:
2175     case SCRATCH:
2176     case ADDR_VEC:
2177     case ADDR_DIFF_VEC:
2178     case VAR_LOCATION:
2179       return 0;
2180 
2181     case CLOBBER:
2182       /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
2183 	 when some combination can't be done.  If we see one, don't think
2184 	 that we can simplify the expression.  */
2185       return (GET_MODE (x) != VOIDmode);
2186 
2187     case PRE_INC:
2188     case PRE_DEC:
2189     case POST_INC:
2190     case POST_DEC:
2191     case PRE_MODIFY:
2192     case POST_MODIFY:
2193     case CALL:
2194     case UNSPEC_VOLATILE:
2195  /* case TRAP_IF: This isn't clear yet.  */
2196       return 1;
2197 
2198     case MEM:
2199     case ASM_INPUT:
2200     case ASM_OPERANDS:
2201       if (MEM_VOLATILE_P (x))
2202 	return 1;
2203 
2204     default:
2205       break;
2206     }
2207 
2208   /* Recursively scan the operands of this expression.  */
2209 
2210   {
2211     const char *fmt = GET_RTX_FORMAT (code);
2212     int i;
2213 
2214     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2215       {
2216 	if (fmt[i] == 'e')
2217 	  {
2218 	    if (side_effects_p (XEXP (x, i)))
2219 	      return 1;
2220 	  }
2221 	else if (fmt[i] == 'E')
2222 	  {
2223 	    int j;
2224 	    for (j = 0; j < XVECLEN (x, i); j++)
2225 	      if (side_effects_p (XVECEXP (x, i, j)))
2226 		return 1;
2227 	  }
2228       }
2229   }
2230   return 0;
2231 }
2232 
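/* For example, a dead-code check might be written as follows, where SET
   is assumed to be a single_set pattern obtained elsewhere; even when
   the destination is unused, the source must be free of side effects
   before the set can be removed:

       if (set && !side_effects_p (SET_SRC (set)))
         ... the set may be deleted once SET_DEST (set) is dead ...

   Unlike volatile_refs_p above, side_effects_p also rejects
   auto-increment addresses, CALLs and CLOBBERs with a non-VOID mode.  */
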
2233 /* Return nonzero if evaluating rtx X might cause a trap.
2234    FLAGS controls how to consider MEMs.  A nonzero value means the context
2235    of the access may have changed from the original, such that the
2236    address may have become invalid.  */
2237 
2238 int
2239 may_trap_p_1 (const_rtx x, unsigned flags)
2240 {
2241   int i;
2242   enum rtx_code code;
2243   const char *fmt;
2244 
2245   /* We make no distinction currently, but this function is part of
2246      the internal target-hooks ABI so we keep the parameter as
2247      "unsigned flags".  */
2248   bool code_changed = flags != 0;
2249 
2250   if (x == 0)
2251     return 0;
2252   code = GET_CODE (x);
2253   switch (code)
2254     {
2255       /* Handle these cases quickly.  */
2256     case CONST_INT:
2257     case CONST_DOUBLE:
2258     case CONST_FIXED:
2259     case CONST_VECTOR:
2260     case SYMBOL_REF:
2261     case LABEL_REF:
2262     case CONST:
2263     case PC:
2264     case CC0:
2265     case REG:
2266     case SCRATCH:
2267       return 0;
2268 
2269     case UNSPEC:
2270     case UNSPEC_VOLATILE:
2271       return targetm.unspec_may_trap_p (x, flags);
2272 
2273     case ASM_INPUT:
2274     case TRAP_IF:
2275       return 1;
2276 
2277     case ASM_OPERANDS:
2278       return MEM_VOLATILE_P (x);
2279 
2280       /* Memory ref can trap unless it's a static var or a stack slot.  */
2281     case MEM:
2282       /* Recognize specific pattern of stack checking probes.  */
2283       if (flag_stack_check
2284 	  && MEM_VOLATILE_P (x)
2285 	  && XEXP (x, 0) == stack_pointer_rtx)
2286 	return 1;
2287       if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2288 	     reference; moving it out of context, such as when code is moved
2289 	     during optimization, might cause its address to become invalid.  */
2290 	  code_changed
2291 	  || !MEM_NOTRAP_P (x))
2292 	{
2293 	  HOST_WIDE_INT size = MEM_SIZE (x) ? INTVAL (MEM_SIZE (x)) : 0;
2294 	  return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2295 					GET_MODE (x), code_changed);
2296 	}
2297 
2298       return 0;
2299 
2300       /* Division by a non-constant might trap.  */
2301     case DIV:
2302     case MOD:
2303     case UDIV:
2304     case UMOD:
2305       if (HONOR_SNANS (GET_MODE (x)))
2306 	return 1;
2307       if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2308 	return flag_trapping_math;
2309       if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2310 	return 1;
2311       break;
2312 
2313     case EXPR_LIST:
2314       /* An EXPR_LIST is used to represent a function call.  This
2315 	 certainly may trap.  */
2316       return 1;
2317 
2318     case GE:
2319     case GT:
2320     case LE:
2321     case LT:
2322     case LTGT:
2323     case COMPARE:
2324       /* Some floating point comparisons may trap.  */
2325       if (!flag_trapping_math)
2326 	break;
2327       /* ??? There is no machine independent way to check for tests that trap
2328 	 when COMPARE is used, though many targets do make this distinction.
2329 	 For instance, sparc uses CCFPE for compares which generate exceptions
2330 	 and CCFP for compares which do not generate exceptions.  */
2331       if (HONOR_NANS (GET_MODE (x)))
2332 	return 1;
2333       /* But often the compare has some CC mode, so check operand
2334 	 modes as well.  */
2335       if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2336 	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2337 	return 1;
2338       break;
2339 
2340     case EQ:
2341     case NE:
2342       if (HONOR_SNANS (GET_MODE (x)))
2343 	return 1;
2344       /* Often comparison is CC mode, so check operand modes.  */
2345       if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2346 	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2347 	return 1;
2348       break;
2349 
2350     case FIX:
2351       /* Conversion of floating point might trap.  */
2352       if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2353 	return 1;
2354       break;
2355 
2356     case NEG:
2357     case ABS:
2358     case SUBREG:
2359       /* These operations don't trap even with floating point.  */
2360       break;
2361 
2362     default:
2363       /* Any floating arithmetic may trap.  */
2364       if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2365 	  && flag_trapping_math)
2366 	return 1;
2367     }
2368 
2369   fmt = GET_RTX_FORMAT (code);
2370   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2371     {
2372       if (fmt[i] == 'e')
2373 	{
2374 	  if (may_trap_p_1 (XEXP (x, i), flags))
2375 	    return 1;
2376 	}
2377       else if (fmt[i] == 'E')
2378 	{
2379 	  int j;
2380 	  for (j = 0; j < XVECLEN (x, i); j++)
2381 	    if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2382 	      return 1;
2383 	}
2384     }
2385   return 0;
2386 }
2387 
2388 /* Return nonzero if evaluating rtx X might cause a trap.  */
2389 
2390 int
2391 may_trap_p (const_rtx x)
2392 {
2393   return may_trap_p_1 (x, 0);
2394 }
2395 
2396 /* Same as above, but additionally return nonzero if evaluating rtx X might
2397    cause a fault.  We define a fault for the purpose of this function as an
2398    erroneous execution condition that cannot be encountered during the normal
2399    execution of a valid program; the typical example is an unaligned memory
2400    access on a strict alignment machine.  The compiler guarantees that it
2401    doesn't generate code that will fault from a valid program, but this
2402    guarantee doesn't mean anything for individual instructions.  Consider
2403    the following example:
2404 
2405       struct S { int d; union { char *cp; int *ip; }; };
2406 
2407       int foo(struct S *s)
2408       {
2409 	if (s->d == 1)
2410 	  return *s->ip;
2411 	else
2412 	  return *s->cp;
2413       }
2414 
2415    on a strict alignment machine.  In a valid program, foo will never be
2416    invoked on a structure for which d is equal to 1 and the underlying
2417    unique field of the union not aligned on a 4-byte boundary, but the
2418    expression *s->ip might cause a fault if considered individually.
2419 
2420    At the RTL level, potentially problematic expressions will almost always
2421    satisfy may_trap_p; for example, the above dereference can be emitted as
2422    (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2423    However, suppose that foo is inlined in a caller that causes s->cp to
2424    point to a local character variable and guarantees that s->d is not set
2425    to 1; foo may have been effectively translated into pseudo-RTL as:
2426 
2427       if ((reg:SI) == 1)
2428 	(set (reg:SI) (mem:SI (%fp - 7)))
2429       else
2430 	(set (reg:QI) (mem:QI (%fp - 7)))
2431 
2432    Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2433    memory reference to a stack slot, but it will certainly cause a fault
2434    on a strict alignment machine.  */
2435 
2436 int
2437 may_trap_or_fault_p (const_rtx x)
2438 {
2439   return may_trap_p_1 (x, 1);
2440 }
2441 
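/* For example, a code motion pass would typically guard hoisting with a
   check along these lines, EXPR being whatever expression it wants to
   move out of its conditional context:

       if (!may_trap_or_fault_p (expr))
         ... EXPR may be evaluated unconditionally ...

   may_trap_p is the right test while the expression stays in its
   original context; may_trap_or_fault_p must be used once that context
   changes.  */
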
2442 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2443    i.e., an inequality.  */
2444 
2445 int
2446 inequality_comparisons_p (const_rtx x)
2447 {
2448   const char *fmt;
2449   int len, i;
2450   const enum rtx_code code = GET_CODE (x);
2451 
2452   switch (code)
2453     {
2454     case REG:
2455     case SCRATCH:
2456     case PC:
2457     case CC0:
2458     case CONST_INT:
2459     case CONST_DOUBLE:
2460     case CONST_FIXED:
2461     case CONST_VECTOR:
2462     case CONST:
2463     case LABEL_REF:
2464     case SYMBOL_REF:
2465       return 0;
2466 
2467     case LT:
2468     case LTU:
2469     case GT:
2470     case GTU:
2471     case LE:
2472     case LEU:
2473     case GE:
2474     case GEU:
2475       return 1;
2476 
2477     default:
2478       break;
2479     }
2480 
2481   len = GET_RTX_LENGTH (code);
2482   fmt = GET_RTX_FORMAT (code);
2483 
2484   for (i = 0; i < len; i++)
2485     {
2486       if (fmt[i] == 'e')
2487 	{
2488 	  if (inequality_comparisons_p (XEXP (x, i)))
2489 	    return 1;
2490 	}
2491       else if (fmt[i] == 'E')
2492 	{
2493 	  int j;
2494 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2495 	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
2496 	      return 1;
2497 	}
2498     }
2499 
2500   return 0;
2501 }
2502 
2503 /* Replace any occurrence of FROM in X with TO.  The function does
2504    not descend into CONST_DOUBLE expressions when replacing.
2505 
2506    Note that copying is not done so X must not be shared unless all copies
2507    are to be modified.  */
2508 
2509 rtx
2510 replace_rtx (rtx x, rtx from, rtx to)
2511 {
2512   int i, j;
2513   const char *fmt;
2514 
2515   /* The following prevents loops from occurring when we change a MEM in
2516      a CONST_DOUBLE onto the same CONST_DOUBLE.  */
2517   if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2518     return x;
2519 
2520   if (x == from)
2521     return to;
2522 
2523   /* Allow this function to make replacements in EXPR_LISTs.  */
2524   if (x == 0)
2525     return 0;
2526 
2527   if (GET_CODE (x) == SUBREG)
2528     {
2529       rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2530 
2531       if (CONST_INT_P (new_rtx))
2532 	{
2533 	  x = simplify_subreg (GET_MODE (x), new_rtx,
2534 			       GET_MODE (SUBREG_REG (x)),
2535 			       SUBREG_BYTE (x));
2536 	  gcc_assert (x);
2537 	}
2538       else
2539 	SUBREG_REG (x) = new_rtx;
2540 
2541       return x;
2542     }
2543   else if (GET_CODE (x) == ZERO_EXTEND)
2544     {
2545       rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2546 
2547       if (CONST_INT_P (new_rtx))
2548 	{
2549 	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2550 					new_rtx, GET_MODE (XEXP (x, 0)));
2551 	  gcc_assert (x);
2552 	}
2553       else
2554 	XEXP (x, 0) = new_rtx;
2555 
2556       return x;
2557     }
2558 
2559   fmt = GET_RTX_FORMAT (GET_CODE (x));
2560   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2561     {
2562       if (fmt[i] == 'e')
2563 	XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2564       else if (fmt[i] == 'E')
2565 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2566 	  XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2567     }
2568 
2569   return x;
2570 }
2571 
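/* For example, substituting a known constant CST for a pseudo REG in a
   copy of an insn pattern might look like this (the names are
   illustrative; the copy matters because replace_rtx modifies X in
   place):

       rtx new_pat = copy_rtx (PATTERN (insn));
       new_pat = replace_rtx (new_pat, reg, cst);

   copy_rtx is needed whenever the original pattern may be shared.  */
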
2572 /* Replace occurrences of the old label in *X with the new one.
2573    DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */
2574 
2575 int
2576 replace_label (rtx *x, void *data)
2577 {
2578   rtx l = *x;
2579   rtx old_label = ((replace_label_data *) data)->r1;
2580   rtx new_label = ((replace_label_data *) data)->r2;
2581   bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2582 
2583   if (l == NULL_RTX)
2584     return 0;
2585 
2586   if (GET_CODE (l) == SYMBOL_REF
2587       && CONSTANT_POOL_ADDRESS_P (l))
2588     {
2589       rtx c = get_pool_constant (l);
2590       if (rtx_referenced_p (old_label, c))
2591 	{
2592 	  rtx new_c, new_l;
2593 	  replace_label_data *d = (replace_label_data *) data;
2594 
2595 	  /* Create a copy of constant C; replace the label inside
2596 	     but do not update LABEL_NUSES because uses in constant pool
2597 	     are not counted.  */
2598 	  new_c = copy_rtx (c);
2599 	  d->update_label_nuses = false;
2600 	  for_each_rtx (&new_c, replace_label, data);
2601 	  d->update_label_nuses = update_label_nuses;
2602 
2603 	  /* Add the new constant NEW_C to constant pool and replace
2604 	     the old reference to constant by new reference.  */
2605 	  new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2606 	  *x = replace_rtx (l, l, new_l);
2607 	}
2608       return 0;
2609     }
2610 
2611   /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2612      field.  This is not handled by for_each_rtx because it doesn't
2613      handle unprinted ('0') fields.  */
2614   if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2615     JUMP_LABEL (l) = new_label;
2616 
2617   if ((GET_CODE (l) == LABEL_REF
2618        || GET_CODE (l) == INSN_LIST)
2619       && XEXP (l, 0) == old_label)
2620     {
2621       XEXP (l, 0) = new_label;
2622       if (update_label_nuses)
2623 	{
2624 	  ++LABEL_NUSES (new_label);
2625 	  --LABEL_NUSES (old_label);
2626 	}
2627       return 0;
2628     }
2629 
2630   return 0;
2631 }
2632 
2633 /* When *BODY is equal to X or X is directly referenced by *BODY
2634    return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2635    too, otherwise FOR_EACH_RTX continues traversing *BODY.  */
2636 
2637 static int
2638 rtx_referenced_p_1 (rtx *body, void *x)
2639 {
2640   rtx y = (rtx) x;
2641 
2642   if (*body == NULL_RTX)
2643     return y == NULL_RTX;
2644 
2645   /* Return true if a label_ref *BODY refers to label Y.  */
2646   if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2647     return XEXP (*body, 0) == y;
2648 
2649   /* If *BODY is a reference to pool constant traverse the constant.  */
2650   if (GET_CODE (*body) == SYMBOL_REF
2651       && CONSTANT_POOL_ADDRESS_P (*body))
2652     return rtx_referenced_p (y, get_pool_constant (*body));
2653 
2654   /* By default, compare the RTL expressions.  */
2655   return rtx_equal_p (*body, y);
2656 }
2657 
2658 /* Return true if X is referenced in BODY.  */
2659 
2660 int
2661 rtx_referenced_p (rtx x, rtx body)
2662 {
2663   return for_each_rtx (&body, rtx_referenced_p_1, x);
2664 }
2665 
2666 /* If INSN is a tablejump, return true and store the label (which precedes the
2667    jump table) in *LABELP and the jump table in *TABLEP.  Both may be NULL.  */
2668 
2669 bool
2670 tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2671 {
2672   rtx label, table;
2673 
2674   if (JUMP_P (insn)
2675       && (label = JUMP_LABEL (insn)) != NULL_RTX
2676       && (table = next_active_insn (label)) != NULL_RTX
2677       && JUMP_TABLE_DATA_P (table))
2678     {
2679       if (labelp)
2680 	*labelp = label;
2681       if (tablep)
2682 	*tablep = table;
2683       return true;
2684     }
2685   return false;
2686 }
2687 
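/* For example, a caller interested in the individual case labels of a
   tablejump might do the following; the XVEC selection mirrors the
   ADDR_VEC/ADDR_DIFF_VEC handling used by label_is_jump_target_p below:

       rtx table;
       if (tablejump_p (insn, NULL, &table))
         {
           rtvec vec = XVEC (PATTERN (table),
                             GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
           ... walk the GET_NUM_ELEM (vec) entries ...
         }

   passing NULL for LABELP when only the table itself is needed.  */
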
2688 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2689    constant that is not in the constant pool and not in the condition
2690    of an IF_THEN_ELSE.  */
2691 
2692 static int
2693 computed_jump_p_1 (const_rtx x)
2694 {
2695   const enum rtx_code code = GET_CODE (x);
2696   int i, j;
2697   const char *fmt;
2698 
2699   switch (code)
2700     {
2701     case LABEL_REF:
2702     case PC:
2703       return 0;
2704 
2705     case CONST:
2706     case CONST_INT:
2707     case CONST_DOUBLE:
2708     case CONST_FIXED:
2709     case CONST_VECTOR:
2710     case SYMBOL_REF:
2711     case REG:
2712       return 1;
2713 
2714     case MEM:
2715       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2716 		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2717 
2718     case IF_THEN_ELSE:
2719       return (computed_jump_p_1 (XEXP (x, 1))
2720 	      || computed_jump_p_1 (XEXP (x, 2)));
2721 
2722     default:
2723       break;
2724     }
2725 
2726   fmt = GET_RTX_FORMAT (code);
2727   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2728     {
2729       if (fmt[i] == 'e'
2730 	  && computed_jump_p_1 (XEXP (x, i)))
2731 	return 1;
2732 
2733       else if (fmt[i] == 'E')
2734 	for (j = 0; j < XVECLEN (x, i); j++)
2735 	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
2736 	    return 1;
2737     }
2738 
2739   return 0;
2740 }
2741 
2742 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2743 
2744    Tablejumps and casesi insns are not considered indirect jumps;
2745    we can recognize them by a (use (label_ref)).  */
2746 
2747 int
2748 computed_jump_p (const_rtx insn)
2749 {
2750   int i;
2751   if (JUMP_P (insn))
2752     {
2753       rtx pat = PATTERN (insn);
2754 
2755       /* If we have a JUMP_LABEL set, we're not a computed jump.  */
2756       if (JUMP_LABEL (insn) != NULL)
2757 	return 0;
2758 
2759       if (GET_CODE (pat) == PARALLEL)
2760 	{
2761 	  int len = XVECLEN (pat, 0);
2762 	  int has_use_labelref = 0;
2763 
2764 	  for (i = len - 1; i >= 0; i--)
2765 	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2766 		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2767 		    == LABEL_REF))
2768 	      has_use_labelref = 1;
2769 
2770 	  if (! has_use_labelref)
2771 	    for (i = len - 1; i >= 0; i--)
2772 	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2773 		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2774 		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2775 		return 1;
2776 	}
2777       else if (GET_CODE (pat) == SET
2778 	       && SET_DEST (pat) == pc_rtx
2779 	       && computed_jump_p_1 (SET_SRC (pat)))
2780 	return 1;
2781     }
2782   return 0;
2783 }
2784 
2785 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2786    calls.  Processes the subexpressions of EXP and passes them to F.  */
2787 static int
2788 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2789 {
2790   int result, i, j;
2791   const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2792   rtx *x;
2793 
2794   for (; format[n] != '\0'; n++)
2795     {
2796       switch (format[n])
2797 	{
2798 	case 'e':
2799 	  /* Call F on X.  */
2800 	  x = &XEXP (exp, n);
2801 	  result = (*f) (x, data);
2802 	  if (result == -1)
2803 	    /* Do not traverse sub-expressions.  */
2804 	    continue;
2805 	  else if (result != 0)
2806 	    /* Stop the traversal.  */
2807 	    return result;
2808 
2809 	  if (*x == NULL_RTX)
2810 	    /* There are no sub-expressions.  */
2811 	    continue;
2812 
2813 	  i = non_rtx_starting_operands[GET_CODE (*x)];
2814 	  if (i >= 0)
2815 	    {
2816 	      result = for_each_rtx_1 (*x, i, f, data);
2817 	      if (result != 0)
2818 		return result;
2819 	    }
2820 	  break;
2821 
2822 	case 'V':
2823 	case 'E':
2824 	  if (XVEC (exp, n) == 0)
2825 	    continue;
2826 	  for (j = 0; j < XVECLEN (exp, n); ++j)
2827 	    {
2828 	      /* Call F on X.  */
2829 	      x = &XVECEXP (exp, n, j);
2830 	      result = (*f) (x, data);
2831 	      if (result == -1)
2832 		/* Do not traverse sub-expressions.  */
2833 		continue;
2834 	      else if (result != 0)
2835 		/* Stop the traversal.  */
2836 		return result;
2837 
2838 	      if (*x == NULL_RTX)
2839 		/* There are no sub-expressions.  */
2840 		continue;
2841 
2842 	      i = non_rtx_starting_operands[GET_CODE (*x)];
2843 	      if (i >= 0)
2844 		{
2845 		  result = for_each_rtx_1 (*x, i, f, data);
2846 		  if (result != 0)
2847 		    return result;
2848 	        }
2849 	    }
2850 	  break;
2851 
2852 	default:
2853 	  /* Nothing to do.  */
2854 	  break;
2855 	}
2856     }
2857 
2858   return 0;
2859 }
2860 
2861 /* Traverse X via depth-first search, calling F for each
2862    sub-expression (including X itself).  F is also passed the DATA.
2863    If F returns -1, do not traverse sub-expressions, but continue
2864    traversing the rest of the tree.  If F ever returns any other
2865    nonzero value, stop the traversal, and return the value returned
2866    by F.  Otherwise, return 0.  This function does not traverse inside
2867    tree structure that contains RTX_EXPRs, or into sub-expressions
2868    whose format code is `0' since it is not known whether or not those
2869    codes are actually RTL.
2870 
2871    This routine is very general, and could (should?) be used to
2872    implement many of the other routines in this file.  */
2873 
2874 int
2875 for_each_rtx (rtx *x, rtx_function f, void *data)
2876 {
2877   int result;
2878   int i;
2879 
2880   /* Call F on X.  */
2881   result = (*f) (x, data);
2882   if (result == -1)
2883     /* Do not traverse sub-expressions.  */
2884     return 0;
2885   else if (result != 0)
2886     /* Stop the traversal.  */
2887     return result;
2888 
2889   if (*x == NULL_RTX)
2890     /* There are no sub-expressions.  */
2891     return 0;
2892 
2893   i = non_rtx_starting_operands[GET_CODE (*x)];
2894   if (i < 0)
2895     return 0;
2896 
2897   return for_each_rtx_1 (*x, i, f, data);
2898 }
2899 
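/* For example, counting the MEMs in an expression can be written with a
   small callback (count_mem and the unsigned counter are illustrative
   names, not part of the RTL API):

       static int
       count_mem (rtx *x, void *data)
       {
         if (*x && MEM_P (*x))
           ++*(unsigned int *) data;
         return 0;
       }

       ...
       unsigned int n = 0;
       for_each_rtx (&pat, count_mem, &n);

   Returning 0 keeps the traversal going; returning -1 from the callback
   would skip the sub-expressions of the current rtx instead.  */
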
2900 
2901 /* Searches X for any reference to REGNO, returning the rtx of the
2902    reference found if any.  Otherwise, returns NULL_RTX.  */
2903 
2904 rtx
2905 regno_use_in (unsigned int regno, rtx x)
2906 {
2907   const char *fmt;
2908   int i, j;
2909   rtx tem;
2910 
2911   if (REG_P (x) && REGNO (x) == regno)
2912     return x;
2913 
2914   fmt = GET_RTX_FORMAT (GET_CODE (x));
2915   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2916     {
2917       if (fmt[i] == 'e')
2918 	{
2919 	  if ((tem = regno_use_in (regno, XEXP (x, i))))
2920 	    return tem;
2921 	}
2922       else if (fmt[i] == 'E')
2923 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2924 	  if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2925 	    return tem;
2926     }
2927 
2928   return NULL_RTX;
2929 }
2930 
2931 /* Return a value indicating whether OP, an operand of a commutative
2932    operation, is preferred as the first or second operand.  The higher
2933    the value, the stronger the preference for being the first operand.
2934    We use negative values to indicate a preference for the second operand
2935    and positive values for the first operand.  */
2936 
2937 int
2938 commutative_operand_precedence (rtx op)
2939 {
2940   enum rtx_code code = GET_CODE (op);
2941 
2942   /* Constants always become the second operand.  Prefer "nice" constants.  */
2943   if (code == CONST_INT)
2944     return -8;
2945   if (code == CONST_DOUBLE)
2946     return -7;
2947   if (code == CONST_FIXED)
2948     return -7;
2949   op = avoid_constant_pool_reference (op);
2950   code = GET_CODE (op);
2951 
2952   switch (GET_RTX_CLASS (code))
2953     {
2954     case RTX_CONST_OBJ:
2955       if (code == CONST_INT)
2956         return -6;
2957       if (code == CONST_DOUBLE)
2958         return -5;
2959       if (code == CONST_FIXED)
2960         return -5;
2961       return -4;
2962 
2963     case RTX_EXTRA:
2964       /* SUBREGs of objects should come second.  */
2965       if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2966         return -3;
2967       return 0;
2968 
2969     case RTX_OBJ:
2970       /* Complex expressions should come first, so decrease the priority
2971          of objects.  Prefer pointer objects over non-pointer objects.  */
2972       if ((REG_P (op) && REG_POINTER (op))
2973 	  || (MEM_P (op) && MEM_POINTER (op)))
2974 	return -1;
2975       return -2;
2976 
2977     case RTX_COMM_ARITH:
2978       /* Prefer operands that are themselves commutative to be first.
2979          This helps to make things linear.  In particular,
2980          (and (and (reg) (reg)) (not (reg))) is canonical.  */
2981       return 4;
2982 
2983     case RTX_BIN_ARITH:
2984       /* If only one operand is a binary expression, it will be the first
2985          operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
2986          is canonical, although it will usually be further simplified.  */
2987       return 2;
2988 
2989     case RTX_UNARY:
2990       /* Then prefer NEG and NOT.  */
2991       if (code == NEG || code == NOT)
2992         return 1;
2993 
2994     default:
2995       return 0;
2996     }
2997 }
2998 
2999 /* Return 1 iff it is necessary to swap operands of commutative operation
3000    in order to canonicalize expression.  */
3001 
3002 bool
3003 swap_commutative_operands_p (rtx x, rtx y)
3004 {
3005   return (commutative_operand_precedence (x)
3006 	  < commutative_operand_precedence (y));
3007 }
3008 
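/* For example, canonicalizing the operands of a commutative operation
   before building it is usually just:

       if (swap_commutative_operands_p (op0, op1))
         {
           rtx tem = op0;
           op0 = op1;
           op1 = tem;
         }

   which is how, e.g., (plus (const_int 4) (reg)) ends up with the
   register first and the constant second.  */
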
3009 /* Return 1 if X is an autoincrement side effect and the register is
3010    not the stack pointer.  */
3011 int
3012 auto_inc_p (const_rtx x)
3013 {
3014   switch (GET_CODE (x))
3015     {
3016     case PRE_INC:
3017     case POST_INC:
3018     case PRE_DEC:
3019     case POST_DEC:
3020     case PRE_MODIFY:
3021     case POST_MODIFY:
3022       /* There are no REG_INC notes for SP.  */
3023       if (XEXP (x, 0) != stack_pointer_rtx)
3024 	return 1;
3025     default:
3026       break;
3027     }
3028   return 0;
3029 }
3030 
3031 /* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
3032 int
3033 loc_mentioned_in_p (rtx *loc, const_rtx in)
3034 {
3035   enum rtx_code code;
3036   const char *fmt;
3037   int i, j;
3038 
3039   if (!in)
3040     return 0;
3041 
3042   code = GET_CODE (in);
3043   fmt = GET_RTX_FORMAT (code);
3044   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3045     {
3046       if (fmt[i] == 'e')
3047 	{
3048 	  if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3049 	    return 1;
3050 	}
3051       else if (fmt[i] == 'E')
3052 	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3053 	  if (loc == &XVECEXP (in, i, j)
3054 	      || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3055 	    return 1;
3056     }
3057   return 0;
3058 }
3059 
3060 /* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
3061    and SUBREG_BYTE, return the bit offset where the subreg begins
3062    (counting from the least significant bit of the operand).  */
3063 
3064 unsigned int
3065 subreg_lsb_1 (enum machine_mode outer_mode,
3066 	      enum machine_mode inner_mode,
3067 	      unsigned int subreg_byte)
3068 {
3069   unsigned int bitpos;
3070   unsigned int byte;
3071   unsigned int word;
3072 
3073   /* A paradoxical subreg begins at bit position 0.  */
3074   if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3075     return 0;
3076 
3077   if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3078     /* If the subreg crosses a word boundary ensure that
3079        it also begins and ends on a word boundary.  */
3080     gcc_assert (!((subreg_byte % UNITS_PER_WORD
3081 		  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3082 		  && (subreg_byte % UNITS_PER_WORD
3083 		      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3084 
3085   if (WORDS_BIG_ENDIAN)
3086     word = (GET_MODE_SIZE (inner_mode)
3087 	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3088   else
3089     word = subreg_byte / UNITS_PER_WORD;
3090   bitpos = word * BITS_PER_WORD;
3091 
3092   if (BYTES_BIG_ENDIAN)
3093     byte = (GET_MODE_SIZE (inner_mode)
3094 	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3095   else
3096     byte = subreg_byte % UNITS_PER_WORD;
3097   bitpos += byte * BITS_PER_UNIT;
3098 
3099   return bitpos;
3100 }
3101 
3102 /* Given a subreg X, return the bit offset where the subreg begins
3103    (counting from the least significant bit of the reg).  */
3104 
3105 unsigned int
3106 subreg_lsb (const_rtx x)
3107 {
3108   return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3109 		       SUBREG_BYTE (x));
3110 }
3111 
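/* As a worked example: for (subreg:QI (reg:SI x) 3) on a 32-bit
   little-endian target (UNITS_PER_WORD == 4), word 0 and byte 3 are
   selected, giving a bit offset of 3 * BITS_PER_UNIT == 24; on a
   big-endian target the same subreg starts at bit 0, because byte 3 is
   the least significant byte there.  */
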
3112 /* Fill in information about a subreg of a hard register.
3113    xregno - A regno of an inner hard subreg_reg (or what will become one).
3114    xmode  - The mode of xregno.
3115    offset - The byte offset.
3116    ymode  - The mode of a top level SUBREG (or what may become one).
3117    info   - Pointer to structure to fill in.  */
3118 void
3119 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3120 		 unsigned int offset, enum machine_mode ymode,
3121 		 struct subreg_info *info)
3122 {
3123   int nregs_xmode, nregs_ymode;
3124   int mode_multiple, nregs_multiple;
3125   int offset_adj, y_offset, y_offset_adj;
3126   int regsize_xmode, regsize_ymode;
3127   bool rknown;
3128 
3129   gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3130 
3131   rknown = false;
3132 
3133   /* If there are holes in a non-scalar mode in registers, we expect
3134      that it is made up of its units concatenated together.  */
3135   if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3136     {
3137       enum machine_mode xmode_unit;
3138 
3139       nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3140       if (GET_MODE_INNER (xmode) == VOIDmode)
3141 	xmode_unit = xmode;
3142       else
3143 	xmode_unit = GET_MODE_INNER (xmode);
3144       gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3145       gcc_assert (nregs_xmode
3146 		  == (GET_MODE_NUNITS (xmode)
3147 		      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3148       gcc_assert (hard_regno_nregs[xregno][xmode]
3149 		  == (hard_regno_nregs[xregno][xmode_unit]
3150 		      * GET_MODE_NUNITS (xmode)));
3151 
3152       /* You can only ask for a SUBREG of a value with holes in the middle
3153 	 if you don't cross the holes.  (Such a SUBREG should be done by
3154 	 picking a different register class, or doing it in memory if
3155 	 necessary.)  An example of a value with holes is XCmode on 32-bit
3156 	 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3157 	 3 for each part, but in memory it's two 128-bit parts.
3158 	 Padding is assumed to be at the end (not necessarily the 'high part')
3159 	 of each unit.  */
3160       if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3161 	   < GET_MODE_NUNITS (xmode))
3162 	  && (offset / GET_MODE_SIZE (xmode_unit)
3163 	      != ((offset + GET_MODE_SIZE (ymode) - 1)
3164 		  / GET_MODE_SIZE (xmode_unit))))
3165 	{
3166 	  info->representable_p = false;
3167 	  rknown = true;
3168 	}
3169     }
3170   else
3171     nregs_xmode = hard_regno_nregs[xregno][xmode];
3172 
3173   nregs_ymode = hard_regno_nregs[xregno][ymode];
3174 
3175   /* Paradoxical subregs are otherwise valid.  */
3176   if (!rknown
3177       && offset == 0
3178       && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3179     {
3180       info->representable_p = true;
3181       /* If this is a big endian paradoxical subreg, which uses more
3182 	 actual hard registers than the original register, we must
3183 	 return a negative offset so that we find the proper highpart
3184 	 of the register.  */
3185       if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3186 	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3187 	info->offset = nregs_xmode - nregs_ymode;
3188       else
3189 	info->offset = 0;
3190       info->nregs = nregs_ymode;
3191       return;
3192     }
3193 
3194   /* If registers store different numbers of bits in the different
3195      modes, we cannot generally form this subreg.  */
3196   if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3197       && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3198       && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3199       && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3200     {
3201       regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3202       regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3203       if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3204 	{
3205 	  info->representable_p = false;
3206 	  info->nregs
3207 	    = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3208 	  info->offset = offset / regsize_xmode;
3209 	  return;
3210 	}
3211       if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3212 	{
3213 	  info->representable_p = false;
3214 	  info->nregs
3215 	    = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3216 	  info->offset = offset / regsize_xmode;
3217 	  return;
3218 	}
3219     }
3220 
3221   /* Lowpart subregs are otherwise valid.  */
3222   if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3223     {
3224       info->representable_p = true;
3225       rknown = true;
3226 
3227       if (offset == 0 || nregs_xmode == nregs_ymode)
3228 	{
3229 	  info->offset = 0;
3230 	  info->nregs = nregs_ymode;
3231 	  return;
3232 	}
3233     }
3234 
3235   /* This should always pass, otherwise we don't know how to verify
3236      the constraint.  These conditions may be relaxed but
3237      subreg_regno_offset would need to be redesigned.  */
3238   gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3239   gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3240 
3241   /* The XMODE value can be seen as a vector of NREGS_XMODE
3242      values.  The subreg must represent a lowpart of given field.
3243      Compute what field it is.  */
3244   offset_adj = offset;
3245   offset_adj -= subreg_lowpart_offset (ymode,
3246 				       mode_for_size (GET_MODE_BITSIZE (xmode)
3247 						      / nregs_xmode,
3248 						      MODE_INT, 0));
3249 
3250   /* Size of ymode must not be greater than the size of xmode.  */
3251   mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3252   gcc_assert (mode_multiple != 0);
3253 
3254   y_offset = offset / GET_MODE_SIZE (ymode);
3255   y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3256   nregs_multiple = nregs_xmode / nregs_ymode;
3257 
3258   gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3259   gcc_assert ((mode_multiple % nregs_multiple) == 0);
3260 
3261   if (!rknown)
3262     {
3263       info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3264       rknown = true;
3265     }
3266   info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3267   info->nregs = nregs_ymode;
3268 }
3269 
3270 /* This function returns the regno offset of a subreg expression.
3271    xregno - A regno of an inner hard subreg_reg (or what will become one).
3272    xmode  - The mode of xregno.
3273    offset - The byte offset.
3274    ymode  - The mode of a top level SUBREG (or what may become one).
3275    RETURN - The regno offset which would be used.  */
3276 unsigned int
3277 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3278 		     unsigned int offset, enum machine_mode ymode)
3279 {
3280   struct subreg_info info;
3281   subreg_get_info (xregno, xmode, offset, ymode, &info);
3282   return info.offset;
3283 }
3284 
3285 /* This function returns true when the offset is representable via
3286    subreg_offset in the given regno.
3287    xregno - A regno of an inner hard subreg_reg (or what will become one).
3288    xmode  - The mode of xregno.
3289    offset - The byte offset.
3290    ymode  - The mode of a top level SUBREG (or what may become one).
3291    RETURN - Whether the offset is representable.  */
3292 bool
3293 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3294 			       unsigned int offset, enum machine_mode ymode)
3295 {
3296   struct subreg_info info;
3297   subreg_get_info (xregno, xmode, offset, ymode, &info);
3298   return info.representable_p;
3299 }
3300 
3301 /* Return the number of a YMODE register to which
3302 
3303        (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3304 
3305    can be simplified.  Return -1 if the subreg can't be simplified.
3306 
3307    XREGNO is a hard register number.  */
3308 
3309 int
3310 simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3311 		       unsigned int offset, enum machine_mode ymode)
3312 {
3313   struct subreg_info info;
3314   unsigned int yregno;
3315 
3316 #ifdef CANNOT_CHANGE_MODE_CLASS
3317   /* Give the backend a chance to disallow the mode change.  */
3318   if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3319       && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3320       && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
3321     return -1;
3322 #endif
3323 
3324   /* We shouldn't simplify stack-related registers.  */
3325   if ((!reload_completed || frame_pointer_needed)
3326       && (xregno == FRAME_POINTER_REGNUM
3327 	  || xregno == HARD_FRAME_POINTER_REGNUM))
3328     return -1;
3329 
3330   if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3331       && xregno == ARG_POINTER_REGNUM)
3332     return -1;
3333 
3334   if (xregno == STACK_POINTER_REGNUM)
3335     return -1;
3336 
3337   /* Try to get the register offset.  */
3338   subreg_get_info (xregno, xmode, offset, ymode, &info);
3339   if (!info.representable_p)
3340     return -1;
3341 
3342   /* Make sure that the offsetted register value is in range.  */
3343   yregno = xregno + info.offset;
3344   if (!HARD_REGISTER_NUM_P (yregno))
3345     return -1;
3346 
3347   /* See whether (reg:YMODE YREGNO) is valid.
3348 
3349      ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3350      This is a kludge to work around how float/complex arguments are passed
3351      on 32-bit SPARC and should be fixed.  */
3352   if (!HARD_REGNO_MODE_OK (yregno, ymode)
3353       && HARD_REGNO_MODE_OK (xregno, xmode))
3354     return -1;
3355 
3356   return (int) yregno;
3357 }
3358 
3359 /* Return the final regno that a subreg expression refers to.  */
3360 unsigned int
3361 subreg_regno (const_rtx x)
3362 {
3363   unsigned int ret;
3364   rtx subreg = SUBREG_REG (x);
3365   int regno = REGNO (subreg);
3366 
3367   ret = regno + subreg_regno_offset (regno,
3368 				     GET_MODE (subreg),
3369 				     SUBREG_BYTE (x),
3370 				     GET_MODE (x));
3371   return ret;
3372 
3373 }
3374 
3375 /* Return the number of registers that a subreg expression refers
3376    to.  */
3377 unsigned int
3378 subreg_nregs (const_rtx x)
3379 {
3380   return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3381 }
3382 
3383 /* Return the number of registers that a subreg REG with REGNO
3384    expression refers to.  This is a copy of the rtlanal.c:subreg_nregs
3385    changed so that the regno can be passed in. */
3386 
3387 unsigned int
3388 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3389 {
3390   struct subreg_info info;
3391   rtx subreg = SUBREG_REG (x);
3392 
3393   subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3394 		   &info);
3395   return info.nregs;
3396 }
3397 
3398 
3399 struct parms_set_data
3400 {
3401   int nregs;
3402   HARD_REG_SET regs;
3403 };
3404 
3405 /* Helper function for noticing stores to parameter registers.  */
3406 static void
3407 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3408 {
3409   struct parms_set_data *const d = (struct parms_set_data *) data;
3410   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3411       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3412     {
3413       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3414       d->nregs--;
3415     }
3416 }
3417 
3418 /* Look backward for the first parameter to be loaded.
3419    Note that loads of all parameters will not necessarily be
3420    found if CSE has eliminated some of them (e.g., an argument
3421    to the outer function is passed down as a parameter).
3422    Do not skip BOUNDARY.  */
3423 rtx
3424 find_first_parameter_load (rtx call_insn, rtx boundary)
3425 {
3426   struct parms_set_data parm;
3427   rtx p, before, first_set;
3428 
3429   /* Since different machines initialize their parameter registers
3430      in different orders, assume nothing.  Collect the set of all
3431      parameter registers.  */
3432   CLEAR_HARD_REG_SET (parm.regs);
3433   parm.nregs = 0;
3434   for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3435     if (GET_CODE (XEXP (p, 0)) == USE
3436 	&& REG_P (XEXP (XEXP (p, 0), 0)))
3437       {
3438 	gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3439 
3440 	/* We only care about registers which can hold function
3441 	   arguments.  */
3442 	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3443 	  continue;
3444 
3445 	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3446 	parm.nregs++;
3447       }
3448   before = call_insn;
3449   first_set = call_insn;
3450 
3451   /* Search backward for the first set of a register in this set.  */
3452   while (parm.nregs && before != boundary)
3453     {
3454       before = PREV_INSN (before);
3455 
3456       /* It is possible that some loads got CSEed from one call to
3457          another.  Stop in that case.  */
3458       if (CALL_P (before))
3459 	break;
3460 
3461       /* Our caller must either ensure that we will find all sets
3462          (in case the code has not been optimized yet), or take care
3463          of possible labels by setting BOUNDARY to the preceding
3464          CODE_LABEL.  */
3465       if (LABEL_P (before))
3466 	{
3467 	  gcc_assert (before == boundary);
3468 	  break;
3469 	}
3470 
3471       if (INSN_P (before))
3472 	{
3473 	  int nregs_old = parm.nregs;
3474 	  note_stores (PATTERN (before), parms_set, &parm);
3475 	  /* If we found something that did not set a parameter reg,
3476 	     we're done.  Do not keep going, as that might result
3477 	     in hoisting an insn before the setting of a pseudo
3478 	     that is used by the hoisted insn. */
3479 	  if (nregs_old != parm.nregs)
3480 	    first_set = before;
3481 	  else
3482 	    break;
3483 	}
3484     }
3485   return first_set;
3486 }
3487 
3488 /* Return true if we should avoid inserting code between INSN and preceding
3489    call instruction.  */
3490 
3491 bool
3492 keep_with_call_p (const_rtx insn)
3493 {
3494   rtx set;
3495 
3496   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3497     {
3498       if (REG_P (SET_DEST (set))
3499 	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3500 	  && fixed_regs[REGNO (SET_DEST (set))]
3501 	  && general_operand (SET_SRC (set), VOIDmode))
3502 	return true;
3503       if (REG_P (SET_SRC (set))
3504 	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3505 	  && REG_P (SET_DEST (set))
3506 	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3507 	return true;
3508       /* There may be a stack pop just after the call and before the store
3509 	 of the return register.  Search for the actual store when deciding
3510 	 if we can break or not.  */
3511       if (SET_DEST (set) == stack_pointer_rtx)
3512 	{
3513 	  /* This CONST_CAST is okay because next_nonnote_insn just
3514 	     returns its argument and we assign it to a const_rtx
3515 	     variable.  */
3516 	  const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3517 	  if (i2 && keep_with_call_p (i2))
3518 	    return true;
3519 	}
3520     }
3521   return false;
3522 }
3523 
3524 /* Return true if LABEL is a target of JUMP_INSN.  This applies only
3525    to non-complex jumps.  That is, direct unconditional, conditional,
3526    and tablejumps, but not computed jumps or returns.  It also does
3527    not apply to the fallthru case of a conditional jump.  */
3528 
3529 bool
3530 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3531 {
3532   rtx tmp = JUMP_LABEL (jump_insn);
3533 
3534   if (label == tmp)
3535     return true;
3536 
3537   if (tablejump_p (jump_insn, NULL, &tmp))
3538     {
3539       rtvec vec = XVEC (PATTERN (tmp),
3540 			GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3541       int i, veclen = GET_NUM_ELEM (vec);
3542 
3543       for (i = 0; i < veclen; ++i)
3544 	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3545 	  return true;
3546     }
3547 
3548   if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3549     return true;
3550 
3551   return false;
3552 }
3553 
3554 
3555 /* Return an estimate of the cost of computing rtx X.
3556    One use is in cse, to decide which expression to keep in the hash table.
3557    Another is in rtl generation, to pick the cheapest way to multiply.
3558    Other uses like the latter are expected in the future.
3559 
3560    The SPEED parameter specifies whether costs optimized for speed or size
3561    should be returned.  */
3562 
3563 int
3564 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED, bool speed)
3565 {
3566   int i, j;
3567   enum rtx_code code;
3568   const char *fmt;
3569   int total;
3570 
3571   if (x == 0)
3572     return 0;
3573 
3574   /* Compute the default costs of certain things.
3575      Note that targetm.rtx_costs can override the defaults.  */
3576 
3577   code = GET_CODE (x);
3578   switch (code)
3579     {
3580     case MULT:
3581       total = COSTS_N_INSNS (5);
3582       break;
3583     case DIV:
3584     case UDIV:
3585     case MOD:
3586     case UMOD:
3587       total = COSTS_N_INSNS (7);
3588       break;
3589     case USE:
3590       /* Used in combine.c as a marker.  */
3591       total = 0;
3592       break;
3593     default:
3594       total = COSTS_N_INSNS (1);
3595     }
3596 
3597   switch (code)
3598     {
3599     case REG:
3600       return 0;
3601 
3602     case SUBREG:
3603       total = 0;
3604       /* If we can't tie these modes, make this expensive.  The larger
3605 	 the mode, the more expensive it is.  */
3606       if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3607 	return COSTS_N_INSNS (2
3608 			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3609       break;
3610 
3611     default:
3612       if (targetm.rtx_costs (x, code, outer_code, &total, speed))
3613 	return total;
3614       break;
3615     }
3616 
3617   /* Sum the costs of the sub-rtx's, plus cost of this operation,
3618      which is already in total.  */
3619 
3620   fmt = GET_RTX_FORMAT (code);
3621   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3622     if (fmt[i] == 'e')
3623       total += rtx_cost (XEXP (x, i), code, speed);
3624     else if (fmt[i] == 'E')
3625       for (j = 0; j < XVECLEN (x, i); j++)
3626 	total += rtx_cost (XVECEXP (x, i, j), code, speed);
3627 
3628   return total;
3629 }
3630 
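/* For example, expansion code deciding between a multiply and a
   shift-add sequence might compare costs like this (OP0, OP1, the
   SImode choice and SET as the outer code are illustrative):

       int mul_cost = rtx_cost (gen_rtx_MULT (SImode, op0, op1), SET, speed);

   and then pick the cheaper alternative.  The COSTS_N_INSNS defaults
   above only apply when targetm.rtx_costs declines to override them.  */
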
3631 /* Return cost of address expression X.
3632    Expect that X is a properly formed address reference.
3633 
3634    The SPEED parameter specifies whether costs optimized for speed or size
3635    should be returned.  */
3636 
3637 int
3638 address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
3639 {
3640   /* We may be asked for the cost of various unusual addresses, such as the
3641      operands of a push instruction.  It is not worthwhile to complicate the
3642      target hook with such cases.  */
3643 
3644   if (!memory_address_addr_space_p (mode, x, as))
3645     return 1000;
3646 
3647   return targetm.address_cost (x, speed);
3648 }
3649 
3650 /* If the target doesn't override, compute the cost as with arithmetic.  */
3651 
3652 int
3653 default_address_cost (rtx x, bool speed)
3654 {
3655   return rtx_cost (x, MEM, speed);
3656 }
3657 
3658 
3659 unsigned HOST_WIDE_INT
3660 nonzero_bits (const_rtx x, enum machine_mode mode)
3661 {
3662   return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3663 }
3664 
3665 unsigned int
3666 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3667 {
3668   return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3669 }
3670 
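/* For example, a simplification can prove that masking is redundant,
   assuming MASK is a CONST_INT and X and MODE are whatever the caller is
   already working with:

       if ((nonzero_bits (x, mode) & ~INTVAL (mask)) == 0)
         ... (and X MASK) is equivalent to X ...

   Similarly, a large enough num_sign_bit_copies (x, mode) is the usual
   way to show that a sign extension of X is redundant.  */
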
3671 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3672    It avoids exponential behavior in nonzero_bits1 when X has
3673    identical subexpressions on the first or the second level.  */
3674 
3675 static unsigned HOST_WIDE_INT
3676 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3677 		     enum machine_mode known_mode,
3678 		     unsigned HOST_WIDE_INT known_ret)
3679 {
3680   if (x == known_x && mode == known_mode)
3681     return known_ret;
3682 
3683   /* Try to find identical subexpressions.  If found call
3684      nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3685      precomputed value for the subexpression as KNOWN_RET.  */
3686 
3687   if (ARITHMETIC_P (x))
3688     {
3689       rtx x0 = XEXP (x, 0);
3690       rtx x1 = XEXP (x, 1);
3691 
3692       /* Check the first level.  */
3693       if (x0 == x1)
3694 	return nonzero_bits1 (x, mode, x0, mode,
3695 			      cached_nonzero_bits (x0, mode, known_x,
3696 						   known_mode, known_ret));
3697 
3698       /* Check the second level.  */
3699       if (ARITHMETIC_P (x0)
3700 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3701 	return nonzero_bits1 (x, mode, x1, mode,
3702 			      cached_nonzero_bits (x1, mode, known_x,
3703 						   known_mode, known_ret));
3704 
3705       if (ARITHMETIC_P (x1)
3706 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3707 	return nonzero_bits1 (x, mode, x0, mode,
3708 			      cached_nonzero_bits (x0, mode, known_x,
3709 						   known_mode, known_ret));
3710     }
3711 
3712   return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3713 }
3714 
3715 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3716    We don't let nonzero_bits recur into num_sign_bit_copies, because that
3717    is less useful.  We can't allow both, because that results in exponential
3718    run time recursion.  There is a nullstone testcase that triggered
3719    this.  This macro avoids accidental uses of num_sign_bit_copies.  */
3720 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3721 
3722 /* Given an expression, X, compute which bits in X can be nonzero.
3723    We don't care about bits outside of those defined in MODE.
3724 
3725    For most X this is simply GET_MODE_MASK (MODE), but if X is
3726    an arithmetic operation, we can do better.  */
3727 
3728 static unsigned HOST_WIDE_INT
3729 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3730 	       enum machine_mode known_mode,
3731 	       unsigned HOST_WIDE_INT known_ret)
3732 {
3733   unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3734   unsigned HOST_WIDE_INT inner_nz;
3735   enum rtx_code code;
3736   unsigned int mode_width = GET_MODE_BITSIZE (mode);
3737 
3738   /* For floating-point and vector values, assume all bits are needed.  */
3739   if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3740       || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
3741     return nonzero;
3742 
3743   /* If X is wider than MODE, use its mode instead.  */
3744   if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3745     {
3746       mode = GET_MODE (x);
3747       nonzero = GET_MODE_MASK (mode);
3748       mode_width = GET_MODE_BITSIZE (mode);
3749     }
3750 
3751   if (mode_width > HOST_BITS_PER_WIDE_INT)
3752     /* Our only callers in this case look for single bit values.  So
3753        just return the mode mask.  Those tests will then be false.  */
3754     return nonzero;
3755 
3756 #ifndef WORD_REGISTER_OPERATIONS
3757   /* If MODE is wider than X, but both fit in a single word on both the host
3758      and target machines, we can compute this from which bits of the
3759      object might be nonzero in its own mode, taking into account the fact
3760      that on many CISC machines, accessing an object in a wider mode
3761      causes the high-order bits to become undefined.  So they are
3762      not known to be zero.  */
3763 
3764   if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3765       && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3766       && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3767       && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3768     {
3769       nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3770 				      known_x, known_mode, known_ret);
3771       nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3772       return nonzero;
3773     }
3774 #endif
3775 
3776   code = GET_CODE (x);
3777   switch (code)
3778     {
3779     case REG:
3780 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3781       /* If pointers extend unsigned and this is a pointer in Pmode, say that
3782 	 all the bits above ptr_mode are known to be zero.  */
3783       /* As we do not know which address space the pointer is referring to,
3784 	 we can do this only if the target does not support different pointer
3785 	 or address modes depending on the address space.  */
3786       if (target_default_pointer_address_modes_p ()
3787 	  && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3788 	  && REG_POINTER (x))
3789 	nonzero &= GET_MODE_MASK (ptr_mode);
3790 #endif
3791 
3792       /* Include declared information about alignment of pointers.  */
3793       /* ??? We don't properly preserve REG_POINTER changes across
3794 	 pointer-to-integer casts, so we can't trust it except for
3795 	 things that we know must be pointers.  See execute/960116-1.c.  */
3796       if ((x == stack_pointer_rtx
3797 	   || x == frame_pointer_rtx
3798 	   || x == arg_pointer_rtx)
3799 	  && REGNO_POINTER_ALIGN (REGNO (x)))
3800 	{
3801 	  unsigned HOST_WIDE_INT alignment
3802 	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3803 
3804 #ifdef PUSH_ROUNDING
3805 	  /* If PUSH_ROUNDING is defined, it is possible for the
3806 	     stack to be momentarily aligned only to that amount,
3807 	     so we pick the least alignment.  */
3808 	  if (x == stack_pointer_rtx && PUSH_ARGS)
3809 	    alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3810 			     alignment);
3811 #endif
3812 
3813 	  nonzero &= ~(alignment - 1);
3814 	}
3815 
3816       {
3817 	unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3818 	rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3819 					      known_mode, known_ret,
3820 					      &nonzero_for_hook);
3821 
3822 	if (new_rtx)
3823 	  nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
3824 						   known_mode, known_ret);
3825 
3826 	return nonzero_for_hook;
3827       }
3828 
3829     case CONST_INT:
3830 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3831       /* If X is negative in MODE, sign-extend the value.  */
3832       if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3833 	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3834 	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3835 #endif
3836 
3837       return INTVAL (x);
3838 
3839     case MEM:
3840 #ifdef LOAD_EXTEND_OP
3841       /* On many, if not most, RISC machines, reading a byte from memory
3842 	 zeros the rest of the register.  Noticing that fact saves a lot
3843 	 of extra zero-extends.  */
3844       if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3845 	nonzero &= GET_MODE_MASK (GET_MODE (x));
3846 #endif
3847       break;
3848 
3849     case EQ:  case NE:
3850     case UNEQ:  case LTGT:
3851     case GT:  case GTU:  case UNGT:
3852     case LT:  case LTU:  case UNLT:
3853     case GE:  case GEU:  case UNGE:
3854     case LE:  case LEU:  case UNLE:
3855     case UNORDERED: case ORDERED:
3856       /* If this produces an integer result, we know which bits are set.
3857 	 Code here used to clear bits outside the mode of X, but that is
3858 	 now done above.  */
3859       /* Mind that MODE is the mode the caller wants to look at this
3860 	 operation in, and not the actual operation mode.  We can wind
3861 	 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3862 	 that describes the results of a vector compare.  */
3863       if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3864 	  && mode_width <= HOST_BITS_PER_WIDE_INT)
3865 	nonzero = STORE_FLAG_VALUE;
3866       break;
3867 
3868     case NEG:
3869 #if 0
3870       /* Disabled to avoid exponential mutual recursion between nonzero_bits
3871 	 and num_sign_bit_copies.  */
3872       if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3873 	  == GET_MODE_BITSIZE (GET_MODE (x)))
3874 	nonzero = 1;
3875 #endif
3876 
3877       if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3878 	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3879       break;
3880 
3881     case ABS:
3882 #if 0
3883       /* Disabled to avoid exponential mutual recursion between nonzero_bits
3884 	 and num_sign_bit_copies.  */
3885       if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3886 	  == GET_MODE_BITSIZE (GET_MODE (x)))
3887 	nonzero = 1;
3888 #endif
3889       break;
3890 
3891     case TRUNCATE:
3892       nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3893 				       known_x, known_mode, known_ret)
3894 		  & GET_MODE_MASK (mode));
3895       break;
3896 
3897     case ZERO_EXTEND:
3898       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3899 				      known_x, known_mode, known_ret);
3900       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3901 	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3902       break;
3903 
3904     case SIGN_EXTEND:
3905       /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3906 	 Otherwise, show that all the bits in the outer mode but not in the
3907 	 inner one may be nonzero.  */
3908       inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3909 				      known_x, known_mode, known_ret);
3910       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3911 	{
3912 	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3913 	  if (inner_nz
3914 	      & (((HOST_WIDE_INT) 1
3915 		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3916 	    inner_nz |= (GET_MODE_MASK (mode)
3917 			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3918 	}
3919 
3920       nonzero &= inner_nz;
3921       break;
3922 
3923     case AND:
3924       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3925 				       known_x, known_mode, known_ret)
3926       		 & cached_nonzero_bits (XEXP (x, 1), mode,
3927 					known_x, known_mode, known_ret);
3928       break;
3929 
3930     case XOR:   case IOR:
3931     case UMIN:  case UMAX:  case SMIN:  case SMAX:
3932       {
3933 	unsigned HOST_WIDE_INT nonzero0 =
3934 	  cached_nonzero_bits (XEXP (x, 0), mode,
3935 			       known_x, known_mode, known_ret);
3936 
3937 	/* Don't call nonzero_bits for the second time if it cannot change
3938 	   anything.  */
3939 	if ((nonzero & nonzero0) != nonzero)
3940 	  nonzero &= nonzero0
3941       		     | cached_nonzero_bits (XEXP (x, 1), mode,
3942 					    known_x, known_mode, known_ret);
3943       }
3944       break;
3945 
3946     case PLUS:  case MINUS:
3947     case MULT:
3948     case DIV:   case UDIV:
3949     case MOD:   case UMOD:
3950       /* We can apply the rules of arithmetic to compute the number of
3951 	 high- and low-order zero bits of these operations.  We start by
3952 	 computing the width (position of the highest-order nonzero bit)
3953 	 and the number of low-order zero bits for each value.  */
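      /* For example, if nz0 == 0xff and nz1 == 0x0f, a PLUS result needs at
	 most MAX (8, 4) + 1 = 9 bits and a MULT result at most 8 + 4 = 12
	 bits, so NONZERO is masked down to 0x1ff or 0xfff respectively.  */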
3954       {
3955 	unsigned HOST_WIDE_INT nz0 =
3956 	  cached_nonzero_bits (XEXP (x, 0), mode,
3957 			       known_x, known_mode, known_ret);
3958 	unsigned HOST_WIDE_INT nz1 =
3959 	  cached_nonzero_bits (XEXP (x, 1), mode,
3960 			       known_x, known_mode, known_ret);
3961 	int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3962 	int width0 = floor_log2 (nz0) + 1;
3963 	int width1 = floor_log2 (nz1) + 1;
3964 	int low0 = floor_log2 (nz0 & -nz0);
3965 	int low1 = floor_log2 (nz1 & -nz1);
3966 	HOST_WIDE_INT op0_maybe_minusp
3967 	  = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3968 	HOST_WIDE_INT op1_maybe_minusp
3969 	  = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3970 	unsigned int result_width = mode_width;
3971 	int result_low = 0;
3972 
3973 	switch (code)
3974 	  {
3975 	  case PLUS:
3976 	    result_width = MAX (width0, width1) + 1;
3977 	    result_low = MIN (low0, low1);
3978 	    break;
3979 	  case MINUS:
3980 	    result_low = MIN (low0, low1);
3981 	    break;
3982 	  case MULT:
3983 	    result_width = width0 + width1;
3984 	    result_low = low0 + low1;
3985 	    break;
3986 	  case DIV:
3987 	    if (width1 == 0)
3988 	      break;
3989 	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
3990 	      result_width = width0;
3991 	    break;
3992 	  case UDIV:
3993 	    if (width1 == 0)
3994 	      break;
3995 	    result_width = width0;
3996 	    break;
3997 	  case MOD:
3998 	    if (width1 == 0)
3999 	      break;
4000 	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
4001 	      result_width = MIN (width0, width1);
4002 	    result_low = MIN (low0, low1);
4003 	    break;
4004 	  case UMOD:
4005 	    if (width1 == 0)
4006 	      break;
4007 	    result_width = MIN (width0, width1);
4008 	    result_low = MIN (low0, low1);
4009 	    break;
4010 	  default:
4011 	    gcc_unreachable ();
4012 	  }
4013 
4014 	if (result_width < mode_width)
4015 	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
4016 
4017 	if (result_low > 0)
4018 	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
4019 
4020 #ifdef POINTERS_EXTEND_UNSIGNED
4021 	/* If pointers extend unsigned and this is an addition or subtraction
4022 	   to a pointer in Pmode, all the bits above ptr_mode are known to be
4023 	   zero.  */
4024 	/* As we do not know which address space the pointer is referring to,
4025 	   we can do this only if the target does not support different pointer
4026 	   or address modes depending on the address space.  */
4027 	if (target_default_pointer_address_modes_p ()
4028 	    && POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
4029 	    && (code == PLUS || code == MINUS)
4030 	    && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4031 	  nonzero &= GET_MODE_MASK (ptr_mode);
4032 #endif
4033       }
4034       break;
4035 
4036     case ZERO_EXTRACT:
4037       if (CONST_INT_P (XEXP (x, 1))
4038 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4039 	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4040       break;
4041 
4042     case SUBREG:
4043       /* If this is a SUBREG formed for a promoted variable that has
4044 	 been zero-extended, we know that at least the high-order bits
4045 	 are zero, though others might be too.  */
4046 
4047       if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4048 	nonzero = GET_MODE_MASK (GET_MODE (x))
4049 		  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4050 					 known_x, known_mode, known_ret);
4051 
4052       /* If the inner mode is a single word for both the host and target
4053 	 machines, we can compute this from which bits of the inner
4054 	 object might be nonzero.  */
4055       if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4056 	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4057 	      <= HOST_BITS_PER_WIDE_INT))
4058 	{
4059 	  nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4060 					  known_x, known_mode, known_ret);
4061 
4062 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4063 	  /* If this is a typical RISC machine, we only have to worry
4064 	     about the way loads are extended.  */
4065 	  if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4066 	       ? (((nonzero
4067 		    & (((unsigned HOST_WIDE_INT) 1
4068 			<< (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4069 		   != 0))
4070 	       : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4071 	      || !MEM_P (SUBREG_REG (x)))
4072 #endif
4073 	    {
4074 	      /* On many CISC machines, accessing an object in a wider mode
4075 		 causes the high-order bits to become undefined.  So they are
4076 		 not known to be zero.  */
4077 	      if (GET_MODE_SIZE (GET_MODE (x))
4078 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4079 		nonzero |= (GET_MODE_MASK (GET_MODE (x))
4080 			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4081 	    }
4082 	}
4083       break;
4084 
4085     case ASHIFTRT:
4086     case LSHIFTRT:
4087     case ASHIFT:
4088     case ROTATE:
4089       /* The nonzero bits are in two classes: any bits within MODE
4090 	 that aren't in GET_MODE (x) are always significant.  The rest of the
4091 	 nonzero bits are those that are significant in the operand of
4092 	 the shift when shifted the appropriate number of bits.  This
4093 	 shows that high-order bits are cleared by the right shift and
4094 	 low-order bits by left shifts.  */
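      /* For example, (lshiftrt:SI (reg:SI n) (const_int 8)) can have nonzero
	 bits only in the low 24 bits of SImode, because the operand's mask is
	 simply shifted right by the count.  */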
4095       if (CONST_INT_P (XEXP (x, 1))
4096 	  && INTVAL (XEXP (x, 1)) >= 0
4097 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4098 	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4099 	{
4100 	  enum machine_mode inner_mode = GET_MODE (x);
4101 	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
4102 	  int count = INTVAL (XEXP (x, 1));
4103 	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4104 	  unsigned HOST_WIDE_INT op_nonzero =
4105 	    cached_nonzero_bits (XEXP (x, 0), mode,
4106 				 known_x, known_mode, known_ret);
4107 	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4108 	  unsigned HOST_WIDE_INT outer = 0;
4109 
4110 	  if (mode_width > width)
4111 	    outer = (op_nonzero & nonzero & ~mode_mask);
4112 
4113 	  if (code == LSHIFTRT)
4114 	    inner >>= count;
4115 	  else if (code == ASHIFTRT)
4116 	    {
4117 	      inner >>= count;
4118 
4119 	      /* If the sign bit may have been nonzero before the shift, we
4120 		 need to mark all the places it could have been copied to
4121 		 by the shift as possibly nonzero.  */
4122 	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4123 		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4124 	    }
4125 	  else if (code == ASHIFT)
4126 	    inner <<= count;
4127 	  else
4128 	    inner = ((inner << (count % width)
4129 		      | (inner >> (width - (count % width)))) & mode_mask);
4130 
4131 	  nonzero &= (outer | inner);
4132 	}
4133       break;
4134 
4135     case FFS:
4136     case POPCOUNT:
4137       /* This is at most the number of bits in the mode.  */
4138       nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4139       break;
4140 
4141     case CLZ:
4142       /* If CLZ has a known value at zero, then the nonzero bits are
4143 	 that value, plus the number of bits in the mode minus one.  */
4144       if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4145 	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4146       else
4147 	nonzero = -1;
4148       break;
4149 
4150     case CTZ:
4151       /* If CTZ has a known value at zero, then the nonzero bits are
4152 	 that value, plus the number of bits in the mode minus one.  */
4153       if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4154 	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4155       else
4156 	nonzero = -1;
4157       break;
4158 
4159     case PARITY:
4160       nonzero = 1;
4161       break;
4162 
4163     case IF_THEN_ELSE:
4164       {
4165 	unsigned HOST_WIDE_INT nonzero_true =
4166 	  cached_nonzero_bits (XEXP (x, 1), mode,
4167 			       known_x, known_mode, known_ret);
4168 
4169 	/* Don't call nonzero_bits for the second time if it cannot change
4170 	   anything.  */
4171 	if ((nonzero & nonzero_true) != nonzero)
4172 	  nonzero &= nonzero_true
4173       		     | cached_nonzero_bits (XEXP (x, 2), mode,
4174 					    known_x, known_mode, known_ret);
4175       }
4176       break;
4177 
4178     default:
4179       break;
4180     }
4181 
4182   return nonzero;
4183 }
4184 
4185 /* See the macro definition above.  */
4186 #undef cached_num_sign_bit_copies
4187 
4188 
4189 /* The function cached_num_sign_bit_copies is a wrapper around
4190    num_sign_bit_copies1.  It avoids exponential behavior in
4191    num_sign_bit_copies1 when X has identical subexpressions on the
4192    first or the second level.  */
4193 
4194 static unsigned int
4195 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4196 			    enum machine_mode known_mode,
4197 			    unsigned int known_ret)
4198 {
4199   if (x == known_x && mode == known_mode)
4200     return known_ret;
4201 
4202   /* Try to find identical subexpressions.  If found call
4203      num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4204      the precomputed value for the subexpression as KNOWN_RET.  */
4205 
4206   if (ARITHMETIC_P (x))
4207     {
4208       rtx x0 = XEXP (x, 0);
4209       rtx x1 = XEXP (x, 1);
4210 
4211       /* Check the first level.  */
4212       if (x0 == x1)
4213 	return
4214 	  num_sign_bit_copies1 (x, mode, x0, mode,
4215 				cached_num_sign_bit_copies (x0, mode, known_x,
4216 							    known_mode,
4217 							    known_ret));
4218 
4219       /* Check the second level.  */
4220       if (ARITHMETIC_P (x0)
4221 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4222 	return
4223 	  num_sign_bit_copies1 (x, mode, x1, mode,
4224 				cached_num_sign_bit_copies (x1, mode, known_x,
4225 							    known_mode,
4226 							    known_ret));
4227 
4228       if (ARITHMETIC_P (x1)
4229 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4230 	return
4231 	  num_sign_bit_copies1 (x, mode, x0, mode,
4232 				cached_num_sign_bit_copies (x0, mode, known_x,
4233 							    known_mode,
4234 							    known_ret));
4235     }
4236 
4237   return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4238 }
4239 
4240 /* Return the number of bits at the high-order end of X that are known to
4241    be equal to the sign bit.  X will be used in mode MODE; if MODE is
4242    VOIDmode, X will be used in its own mode.  The returned value will always
4243    be between 1 and the number of bits in MODE.  */
4244 
4245 static unsigned int
4246 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4247 		      enum machine_mode known_mode,
4248 		      unsigned int known_ret)
4249 {
4250   enum rtx_code code = GET_CODE (x);
4251   unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4252   int num0, num1, result;
4253   unsigned HOST_WIDE_INT nonzero;
4254 
4255   /* If we weren't given a mode, use the mode of X.  If the mode is still
4256      VOIDmode, we don't know anything.  Likewise if one of the modes is
4257      floating-point.  */
4258 
4259   if (mode == VOIDmode)
4260     mode = GET_MODE (x);
4261 
4262   if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4263       || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4264     return 1;
4265 
4266   /* For a smaller object, just ignore the high bits.  */
4267   if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4268     {
4269       num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4270 					 known_x, known_mode, known_ret);
4271       return MAX (1,
4272 		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4273     }
4274 
4275   if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4276     {
4277 #ifndef WORD_REGISTER_OPERATIONS
4278   /* If this machine does not do all register operations on the entire
4279      register and MODE is wider than the mode of X, we can say nothing
4280      at all about the high-order bits.  */
4281       return 1;
4282 #else
4283       /* Likewise on machines that do, if the mode of the object is smaller
4284 	 than a word and loads of that size don't sign extend, we can say
4285 	 nothing about the high order bits.  */
4286       if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4287 #ifdef LOAD_EXTEND_OP
4288 	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4289 #endif
4290 	  )
4291 	return 1;
4292 #endif
4293     }
4294 
4295   switch (code)
4296     {
4297     case REG:
4298 
4299 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4300       /* If pointers extend signed and this is a pointer in Pmode, say that
4301 	 all the bits above ptr_mode are known to be sign bit copies.  */
4302 	/* As we do not know which address space the pointer is referring to,
4303 	 we can do this only if the target does not support different pointer
4304 	 or address modes depending on the address space.  */
4305       if (target_default_pointer_address_modes_p ()
4306 	  && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4307 	  && mode == Pmode && REG_POINTER (x))
4308 	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4309 #endif
4310 
4311       {
4312 	unsigned int copies_for_hook = 1, copies = 1;
4313 	rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4314 						     known_mode, known_ret,
4315 						     &copies_for_hook);
4316 
4317 	if (new_rtx)
4318 	  copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4319 					       known_mode, known_ret);
4320 
4321 	if (copies > 1 || copies_for_hook > 1)
4322 	  return MAX (copies, copies_for_hook);
4323 
4324 	/* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
4325       }
4326       break;
4327 
4328     case MEM:
4329 #ifdef LOAD_EXTEND_OP
4330       /* Some RISC machines sign-extend all loads of smaller than a word.  */
4331       if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4332 	return MAX (1, ((int) bitwidth
4333 			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4334 #endif
4335       break;
4336 
4337     case CONST_INT:
4338       /* If the constant is negative, take its 1's complement and remask.
4339 	 Then see how many zero bits we have.  */
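      /* For example, (const_int -4) in a 32-bit mode is 0xfffffffc; its
	 complement under the mode mask is 3, so 32 - floor_log2 (3) - 1 = 30
	 high-order bits are copies of the sign bit.  */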
4340       nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4341       if (bitwidth <= HOST_BITS_PER_WIDE_INT
4342 	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4343 	nonzero = (~nonzero) & GET_MODE_MASK (mode);
4344 
4345       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4346 
4347     case SUBREG:
4348       /* If this is a SUBREG for a promoted object that is sign-extended
4349 	 and we are looking at it in a wider mode, we know that at least the
4350 	 high-order bits are known to be sign bit copies.  */
4351 
4352       if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4353 	{
4354 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4355 					     known_x, known_mode, known_ret);
4356 	  return MAX ((int) bitwidth
4357 		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4358 		      num0);
4359 	}
4360 
4361       /* For a smaller object, just ignore the high bits.  */
4362       if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4363 	{
4364 	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4365 					     known_x, known_mode, known_ret);
4366 	  return MAX (1, (num0
4367 			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4368 				   - bitwidth)));
4369 	}
4370 
4371 #ifdef WORD_REGISTER_OPERATIONS
4372 #ifdef LOAD_EXTEND_OP
4373       /* For paradoxical SUBREGs on machines where all register operations
4374 	 affect the entire register, just look inside.  Note that we are
4375 	 passing MODE to the recursive call, so the number of sign bit copies
4376 	 will remain relative to that mode, not the inner mode.  */
4377 
4378       /* This works only if loads sign extend.  Otherwise, if we get a
4379 	 reload for the inner part, it may be loaded from the stack, and
4380 	 then we lose all sign bit copies that existed before the store
4381 	 to the stack.  */
4382 
4383       if ((GET_MODE_SIZE (GET_MODE (x))
4384 	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4385 	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4386 	  && MEM_P (SUBREG_REG (x)))
4387 	return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4388 					   known_x, known_mode, known_ret);
4389 #endif
4390 #endif
4391       break;
4392 
4393     case SIGN_EXTRACT:
4394       if (CONST_INT_P (XEXP (x, 1)))
4395 	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4396       break;
4397 
4398     case SIGN_EXTEND:
4399       return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4400 	      + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4401 					    known_x, known_mode, known_ret));
4402 
4403     case TRUNCATE:
4404       /* For a smaller object, just ignore the high bits.  */
4405       num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4406 					 known_x, known_mode, known_ret);
4407       return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4408 				    - bitwidth)));
4409 
4410     case NOT:
4411       return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4412 					 known_x, known_mode, known_ret);
4413 
4414     case ROTATE:       case ROTATERT:
4415       /* If we are rotating left by a number of bits less than the number
4416 	 of sign bit copies, we can just subtract that amount from the
4417 	 number.  */
4418       if (CONST_INT_P (XEXP (x, 1))
4419 	  && INTVAL (XEXP (x, 1)) >= 0
4420 	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4421 	{
4422 	  num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4423 					     known_x, known_mode, known_ret);
4424 	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4425 				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4426 	}
4427       break;
4428 
4429     case NEG:
4430       /* In general, this subtracts one sign bit copy.  But if the value
4431 	 is known to be positive, the number of sign bit copies is the
4432 	 same as that of the input.  Finally, if the input has just one bit
4433 	 that might be nonzero, all the bits are copies of the sign bit.  */
4434       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4435 					 known_x, known_mode, known_ret);
4436       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4437 	return num0 > 1 ? num0 - 1 : 1;
4438 
4439       nonzero = nonzero_bits (XEXP (x, 0), mode);
4440       if (nonzero == 1)
4441 	return bitwidth;
4442 
4443       if (num0 > 1
4444 	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4445 	num0--;
4446 
4447       return num0;
4448 
4449     case IOR:   case AND:   case XOR:
4450     case SMIN:  case SMAX:  case UMIN:  case UMAX:
4451       /* Logical operations will preserve the number of sign-bit copies.
4452 	 MIN and MAX operations always return one of the operands.  */
4453       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4454 					 known_x, known_mode, known_ret);
4455       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4456 					 known_x, known_mode, known_ret);
4457 
4458       /* If num1 is clearing some of the top bits then regardless of
4459 	 the other term, we are guaranteed to have at least that many
4460 	 high-order zero bits.  */
4461       if (code == AND
4462 	  && num1 > 1
4463 	  && bitwidth <= HOST_BITS_PER_WIDE_INT
4464 	  && CONST_INT_P (XEXP (x, 1))
4465 	  && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4466 	return num1;
4467 
4468       /* Similarly for IOR when setting high-order bits.  */
4469       if (code == IOR
4470 	  && num1 > 1
4471 	  && bitwidth <= HOST_BITS_PER_WIDE_INT
4472 	  && CONST_INT_P (XEXP (x, 1))
4473 	  && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4474 	return num1;
4475 
4476       return MIN (num0, num1);
4477 
4478     case PLUS:  case MINUS:
4479       /* For addition and subtraction, we can have a 1-bit carry.  However,
4480 	 if we are subtracting 1 from a positive number, there will not
4481 	 be such a carry.  Furthermore, if the positive number is known to
4482 	 be 0 or 1, we know the result is either -1 or 0.  */
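      /* For example, in (plus:SI (reg:SI n) (const_int -1)) where the register
	 is known to be 0 or 1, the result is -1 or 0, so every bit is a copy
	 of the sign bit and the full bitwidth is returned.  */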
4483 
4484       if (code == PLUS && XEXP (x, 1) == constm1_rtx
4485 	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
4486 	{
4487 	  nonzero = nonzero_bits (XEXP (x, 0), mode);
4488 	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4489 	    return (nonzero == 1 || nonzero == 0 ? bitwidth
4490 		    : bitwidth - floor_log2 (nonzero) - 1);
4491 	}
4492 
4493       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4494 					 known_x, known_mode, known_ret);
4495       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4496 					 known_x, known_mode, known_ret);
4497       result = MAX (1, MIN (num0, num1) - 1);
4498 
4499 #ifdef POINTERS_EXTEND_UNSIGNED
4500       /* If pointers extend signed and this is an addition or subtraction
4501 	 to a pointer in Pmode, all the bits above ptr_mode are known to be
4502 	 sign bit copies.  */
4503       /* As we do not know which address space the pointer is referring to,
4504 	 we can do this only if the target does not support different pointer
4505 	 or address modes depending on the address space.  */
4506       if (target_default_pointer_address_modes_p ()
4507 	  && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4508 	  && (code == PLUS || code == MINUS)
4509 	  && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4510 	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4511 			     - GET_MODE_BITSIZE (ptr_mode) + 1),
4512 		      result);
4513 #endif
4514       return result;
4515 
4516     case MULT:
4517       /* The number of bits of the product is the sum of the number of
4518 	 bits of both terms.  However, unless one of the terms is known
4519 	 to be positive, we must allow for an additional bit since negating
4520 	 a negative number can remove one sign bit copy.  */
4521 
4522       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4523 					 known_x, known_mode, known_ret);
4524       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4525 					 known_x, known_mode, known_ret);
4526 
4527       result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4528       if (result > 0
4529 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4530 	      || (((nonzero_bits (XEXP (x, 0), mode)
4531 		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4532 		  && ((nonzero_bits (XEXP (x, 1), mode)
4533 		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4534 	result--;
4535 
4536       return MAX (1, result);
4537 
4538     case UDIV:
4539       /* The result must be <= the first operand.  If the first operand
4540 	 has the high bit set, we know nothing about the number of sign
4541 	 bit copies.  */
4542       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4543 	return 1;
4544       else if ((nonzero_bits (XEXP (x, 0), mode)
4545 		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4546 	return 1;
4547       else
4548 	return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4549 					   known_x, known_mode, known_ret);
4550 
4551     case UMOD:
4552       /* The result must be <= the second operand.  If the second operand
4553 	 has (or just might have) the high bit set, we know nothing about
4554 	 the number of sign bit copies.  */
4555       if (bitwidth > HOST_BITS_PER_WIDE_INT)
4556 	return 1;
4557       else if ((nonzero_bits (XEXP (x, 1), mode)
4558 		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4559 	return 1;
4560       else
4561 	return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4562 					   known_x, known_mode, known_ret);
4563 
4564     case DIV:
4565       /* Similar to unsigned division, except that we have to worry about
4566 	 the case where the divisor is negative, in which case we have
4567 	 to add 1.  */
4568       result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4569 					   known_x, known_mode, known_ret);
4570       if (result > 1
4571 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4572 	      || (nonzero_bits (XEXP (x, 1), mode)
4573 		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4574 	result--;
4575 
4576       return result;
4577 
4578     case MOD:
4579       result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4580 					   known_x, known_mode, known_ret);
4581       if (result > 1
4582 	  && (bitwidth > HOST_BITS_PER_WIDE_INT
4583 	      || (nonzero_bits (XEXP (x, 1), mode)
4584 		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4585 	result--;
4586 
4587       return result;
4588 
4589     case ASHIFTRT:
4590       /* Shifts by a constant add to the number of bits equal to the
4591 	 sign bit.  */
4592       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4593 					 known_x, known_mode, known_ret);
4594       if (CONST_INT_P (XEXP (x, 1))
4595 	  && INTVAL (XEXP (x, 1)) > 0
4596 	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4597 	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4598 
4599       return num0;
4600 
4601     case ASHIFT:
4602       /* Left shifts destroy copies.  */
4603       if (!CONST_INT_P (XEXP (x, 1))
4604 	  || INTVAL (XEXP (x, 1)) < 0
4605 	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4606 	  || INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x)))
4607 	return 1;
4608 
4609       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4610 					 known_x, known_mode, known_ret);
4611       return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4612 
4613     case IF_THEN_ELSE:
4614       num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4615 					 known_x, known_mode, known_ret);
4616       num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4617 					 known_x, known_mode, known_ret);
4618       return MIN (num0, num1);
4619 
4620     case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
4621     case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
4622     case GEU: case GTU: case LEU: case LTU:
4623     case UNORDERED: case ORDERED:
4624       /* The result is STORE_FLAG_VALUE; if that constant is negative, take
4625 	 its 1's complement and remask.  Then see how many zero bits we have.  */
4626       nonzero = STORE_FLAG_VALUE;
4627       if (bitwidth <= HOST_BITS_PER_WIDE_INT
4628 	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4629 	nonzero = (~nonzero) & GET_MODE_MASK (mode);
4630 
4631       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4632 
4633     default:
4634       break;
4635     }
4636 
4637   /* If we haven't been able to figure it out by one of the above rules,
4638      see if some of the high-order bits are known to be zero.  If so,
4639      count those bits and return one less than that amount.  If we can't
4640      safely compute the mask for this mode, always return BITWIDTH.  */
4641 
4642   bitwidth = GET_MODE_BITSIZE (mode);
4643   if (bitwidth > HOST_BITS_PER_WIDE_INT)
4644     return 1;
4645 
4646   nonzero = nonzero_bits (x, mode);
4647   return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4648 	 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4649 }
4650 
4651 /* Calculate the rtx_cost of a single instruction.  A return value of
4652    zero indicates an instruction pattern without a known cost.  */
4653 
4654 int
4655 insn_rtx_cost (rtx pat, bool speed)
4656 {
4657   int i, cost;
4658   rtx set;
4659 
4660   /* Extract the single set rtx from the instruction pattern.
4661      We can't use single_set since we only have the pattern.  */
4662   if (GET_CODE (pat) == SET)
4663     set = pat;
4664   else if (GET_CODE (pat) == PARALLEL)
4665     {
4666       set = NULL_RTX;
4667       for (i = 0; i < XVECLEN (pat, 0); i++)
4668 	{
4669 	  rtx x = XVECEXP (pat, 0, i);
4670 	  if (GET_CODE (x) == SET)
4671 	    {
4672 	      if (set)
4673 		return 0;
4674 	      set = x;
4675 	    }
4676 	}
4677       if (!set)
4678 	return 0;
4679     }
4680   else
4681     return 0;
4682 
4683   cost = rtx_cost (SET_SRC (set), SET, speed);
4684   return cost > 0 ? cost : COSTS_N_INSNS (1);
4685 }
4686 
4687 /* Given an insn INSN and condition COND, return the condition in a
4688    canonical form to simplify testing by callers.  Specifically:
4689 
4690    (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4691    (2) Both operands will be machine operands; (cc0) will have been replaced.
4692    (3) If an operand is a constant, it will be the second operand.
4693    (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4694        for GE, GEU, and LEU.
4695 
4696    If the condition cannot be understood, or is an inequality floating-point
4697    comparison which needs to be reversed, 0 will be returned.
4698 
4699    If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4700 
4701    If EARLIEST is nonzero, it is a pointer to a place where the earliest
4702    insn used in locating the condition was found.  If a replacement test
4703    of the condition is desired, it should be placed in front of that
4704    insn and we will be sure that the inputs are still valid.
4705 
4706    If WANT_REG is nonzero, we wish the condition to be relative to that
4707    register, if possible.  Therefore, do not canonicalize the condition
4708    further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
4709    to be a compare to a CC mode register.
4710 
4711    If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4712    and at INSN.  */
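/* For example, under rule (4) above, (le (reg:SI n) (const_int 4)) is
   returned as (lt (reg:SI n) (const_int 5)), and (geu (reg:SI n)
   (const_int 1)) as (gtu (reg:SI n) (const_int 0)).  */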
4713 
4714 rtx
4715 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4716 			rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4717 {
4718   enum rtx_code code;
4719   rtx prev = insn;
4720   const_rtx set;
4721   rtx tem;
4722   rtx op0, op1;
4723   int reverse_code = 0;
4724   enum machine_mode mode;
4725   basic_block bb = BLOCK_FOR_INSN (insn);
4726 
4727   code = GET_CODE (cond);
4728   mode = GET_MODE (cond);
4729   op0 = XEXP (cond, 0);
4730   op1 = XEXP (cond, 1);
4731 
4732   if (reverse)
4733     code = reversed_comparison_code (cond, insn);
4734   if (code == UNKNOWN)
4735     return 0;
4736 
4737   if (earliest)
4738     *earliest = insn;
4739 
4740   /* If we are comparing a register with zero, see if the register is set
4741      in the previous insn to a COMPARE or a comparison operation.  Perform
4742      the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4743      in cse.c  */
4744 
4745   while ((GET_RTX_CLASS (code) == RTX_COMPARE
4746 	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4747 	 && op1 == CONST0_RTX (GET_MODE (op0))
4748 	 && op0 != want_reg)
4749     {
4750       /* Set nonzero when we find something of interest.  */
4751       rtx x = 0;
4752 
4753 #ifdef HAVE_cc0
4754       /* If comparison with cc0, import actual comparison from compare
4755 	 insn.  */
4756       if (op0 == cc0_rtx)
4757 	{
4758 	  if ((prev = prev_nonnote_insn (prev)) == 0
4759 	      || !NONJUMP_INSN_P (prev)
4760 	      || (set = single_set (prev)) == 0
4761 	      || SET_DEST (set) != cc0_rtx)
4762 	    return 0;
4763 
4764 	  op0 = SET_SRC (set);
4765 	  op1 = CONST0_RTX (GET_MODE (op0));
4766 	  if (earliest)
4767 	    *earliest = prev;
4768 	}
4769 #endif
4770 
4771       /* If this is a COMPARE, pick up the two things being compared.  */
4772       if (GET_CODE (op0) == COMPARE)
4773 	{
4774 	  op1 = XEXP (op0, 1);
4775 	  op0 = XEXP (op0, 0);
4776 	  continue;
4777 	}
4778       else if (!REG_P (op0))
4779 	break;
4780 
4781       /* Go back to the previous insn.  Stop if it is not an INSN.  We also
4782 	 stop if it isn't a single set or if it has a REG_INC note because
4783 	 we don't want to bother dealing with it.  */
4784 
4785       prev = prev_nonnote_nondebug_insn (prev);
4786 
4787       if (prev == 0
4788 	  || !NONJUMP_INSN_P (prev)
4789 	  || FIND_REG_INC_NOTE (prev, NULL_RTX)
4790 	  /* In cfglayout mode, there do not have to be labels at the
4791 	     beginning of a block, or jumps at the end, so the previous
4792 	     conditions would not stop us when we reach bb boundary.  */
4793 	  || BLOCK_FOR_INSN (prev) != bb)
4794 	break;
4795 
4796       set = set_of (op0, prev);
4797 
4798       if (set
4799 	  && (GET_CODE (set) != SET
4800 	      || !rtx_equal_p (SET_DEST (set), op0)))
4801 	break;
4802 
4803       /* If this is setting OP0, get what it sets it to if it looks
4804 	 relevant.  */
4805       if (set)
4806 	{
4807 	  enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4808 #ifdef FLOAT_STORE_FLAG_VALUE
4809 	  REAL_VALUE_TYPE fsfv;
4810 #endif
4811 
4812 	  /* ??? We may not combine comparisons done in a CCmode with
4813 	     comparisons not done in a CCmode.  This is to aid targets
4814 	     like Alpha that have an IEEE compliant EQ instruction, and
4815 	     a non-IEEE compliant BEQ instruction.  The use of CCmode is
4816 	     actually artificial, simply to prevent the combination, but
4817 	     should not affect other platforms.
4818 
4819 	     However, we must allow VOIDmode comparisons to match either
4820 	     CCmode or non-CCmode comparison, because some ports have
4821 	     modeless comparisons inside branch patterns.
4822 
4823 	     ??? This mode check should perhaps look more like the mode check
4824 	     in simplify_comparison in combine.  */
4825 
4826 	  if ((GET_CODE (SET_SRC (set)) == COMPARE
4827 	       || (((code == NE
4828 		     || (code == LT
4829 			 && GET_MODE_CLASS (inner_mode) == MODE_INT
4830 			 && (GET_MODE_BITSIZE (inner_mode)
4831 			     <= HOST_BITS_PER_WIDE_INT)
4832 			 && (STORE_FLAG_VALUE
4833 			     & ((HOST_WIDE_INT) 1
4834 				<< (GET_MODE_BITSIZE (inner_mode) - 1))))
4835 #ifdef FLOAT_STORE_FLAG_VALUE
4836 		     || (code == LT
4837 			 && SCALAR_FLOAT_MODE_P (inner_mode)
4838 			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4839 			     REAL_VALUE_NEGATIVE (fsfv)))
4840 #endif
4841 		     ))
4842 		   && COMPARISON_P (SET_SRC (set))))
4843 	      && (((GET_MODE_CLASS (mode) == MODE_CC)
4844 		   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4845 		  || mode == VOIDmode || inner_mode == VOIDmode))
4846 	    x = SET_SRC (set);
4847 	  else if (((code == EQ
4848 		     || (code == GE
4849 			 && (GET_MODE_BITSIZE (inner_mode)
4850 			     <= HOST_BITS_PER_WIDE_INT)
4851 			 && GET_MODE_CLASS (inner_mode) == MODE_INT
4852 			 && (STORE_FLAG_VALUE
4853 			     & ((HOST_WIDE_INT) 1
4854 				<< (GET_MODE_BITSIZE (inner_mode) - 1))))
4855 #ifdef FLOAT_STORE_FLAG_VALUE
4856 		     || (code == GE
4857 			 && SCALAR_FLOAT_MODE_P (inner_mode)
4858 			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4859 			     REAL_VALUE_NEGATIVE (fsfv)))
4860 #endif
4861 		     ))
4862 		   && COMPARISON_P (SET_SRC (set))
4863 		   && (((GET_MODE_CLASS (mode) == MODE_CC)
4864 			== (GET_MODE_CLASS (inner_mode) == MODE_CC))
4865 		       || mode == VOIDmode || inner_mode == VOIDmode))
4866 
4867 	    {
4868 	      reverse_code = 1;
4869 	      x = SET_SRC (set);
4870 	    }
4871 	  else
4872 	    break;
4873 	}
4874 
4875       else if (reg_set_p (op0, prev))
4876 	/* If this sets OP0, but not directly, we have to give up.  */
4877 	break;
4878 
4879       if (x)
4880 	{
4881 	  /* If the caller is expecting the condition to be valid at INSN,
4882 	     make sure X doesn't change before INSN.  */
4883 	  if (valid_at_insn_p)
4884 	    if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4885 	      break;
4886 	  if (COMPARISON_P (x))
4887 	    code = GET_CODE (x);
4888 	  if (reverse_code)
4889 	    {
4890 	      code = reversed_comparison_code (x, prev);
4891 	      if (code == UNKNOWN)
4892 		return 0;
4893 	      reverse_code = 0;
4894 	    }
4895 
4896 	  op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4897 	  if (earliest)
4898 	    *earliest = prev;
4899 	}
4900     }
4901 
4902   /* If constant is first, put it last.  */
4903   if (CONSTANT_P (op0))
4904     code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4905 
4906   /* If OP0 is the result of a comparison, we weren't able to find what
4907      was really being compared, so fail.  */
4908   if (!allow_cc_mode
4909       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4910     return 0;
4911 
4912   /* Canonicalize any ordered comparison with integers involving equality
4913      if we can do computations in the relevant mode and we do not
4914      overflow.  */
4915 
4916   if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4917       && CONST_INT_P (op1)
4918       && GET_MODE (op0) != VOIDmode
4919       && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4920     {
4921       HOST_WIDE_INT const_val = INTVAL (op1);
4922       unsigned HOST_WIDE_INT uconst_val = const_val;
4923       unsigned HOST_WIDE_INT max_val
4924 	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4925 
4926       switch (code)
4927 	{
4928 	case LE:
4929 	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4930 	    code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4931 	  break;
4932 
4933 	/* When cross-compiling, const_val might be sign-extended from
4934 	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
4935 	case GE:
4936 	  if ((HOST_WIDE_INT) (const_val & max_val)
4937 	      != (((HOST_WIDE_INT) 1
4938 		   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4939 	    code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4940 	  break;
4941 
4942 	case LEU:
4943 	  if (uconst_val < max_val)
4944 	    code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4945 	  break;
4946 
4947 	case GEU:
4948 	  if (uconst_val != 0)
4949 	    code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4950 	  break;
4951 
4952 	default:
4953 	  break;
4954 	}
4955     }
4956 
4957   /* Never return CC0; return zero instead.  */
4958   if (CC0_P (op0))
4959     return 0;
4960 
4961   return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4962 }
4963 
4964 /* Given a jump insn JUMP, return the condition that will cause it to branch
4965    to its JUMP_LABEL.  If the condition cannot be understood, or is an
4966    inequality floating-point comparison which needs to be reversed, 0 will
4967    be returned.
4968 
4969    If EARLIEST is nonzero, it is a pointer to a place where the earliest
4970    insn used in locating the condition was found.  If a replacement test
4971    of the condition is desired, it should be placed in front of that
4972    insn and we will be sure that the inputs are still valid.  If EARLIEST
4973    is null, the returned condition will be valid at INSN.
4974 
4975    If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4976    compare to a CC mode register.
4977 
4978    VALID_AT_INSN_P is the same as for canonicalize_condition.  */
4979 
4980 rtx
4981 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4982 {
4983   rtx cond;
4984   int reverse;
4985   rtx set;
4986 
4987   /* If this is not a standard conditional jump, we can't parse it.  */
4988   if (!JUMP_P (jump)
4989       || ! any_condjump_p (jump))
4990     return 0;
4991   set = pc_set (jump);
4992 
4993   cond = XEXP (SET_SRC (set), 0);
4994 
4995   /* If this branches to JUMP_LABEL when the condition is false, reverse
4996      the condition.  */
4997   reverse
4998     = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4999       && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
5000 
5001   return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5002 				 allow_cc_mode, valid_at_insn_p);
5003 }
5004 
5005 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5006    TARGET_MODE_REP_EXTENDED.
5007 
5008    Note that we assume that the property of
5009    TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
5010    narrower than mode B.  I.e., if A is a mode narrower than B then in
5011    order to be able to operate on it in mode B, mode A needs to
5012    satisfy the requirements set by the representation of mode B.  */
5013 
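/* For example, on a hypothetical target whose mode_rep_extended hook reports
   SIGN_EXTEND for each step from QImode up to SImode, the loop below records
   num_sign_bit_copies_in_rep[SImode][QImode] == 24 (assuming the usual 8-bit
   QImode and 32-bit SImode), i.e. a QImode value held in an SImode register
   always has its top 24 bits equal to the sign bit.  */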
5014 static void
5015 init_num_sign_bit_copies_in_rep (void)
5016 {
5017   enum machine_mode mode, in_mode;
5018 
5019   for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5020        in_mode = GET_MODE_WIDER_MODE (mode))
5021     for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5022 	 mode = GET_MODE_WIDER_MODE (mode))
5023       {
5024 	enum machine_mode i;
5025 
5026 	/* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5027 	   extends to the next widest mode.  */
5028 	gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5029 		    || GET_MODE_WIDER_MODE (mode) == in_mode);
5030 
5031 	/* We are in in_mode.  Count how many bits outside of mode
5032 	   have to be copies of the sign-bit.  */
5033 	for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5034 	  {
5035 	    enum machine_mode wider = GET_MODE_WIDER_MODE (i);
5036 
5037 	    if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5038 		/* We can only check sign-bit copies starting from the
5039 		   top-bit.  In order to be able to check the bits we
5040 		   have already seen we pretend that subsequent bits
5041 		   have to be sign-bit copies too.  */
5042 		|| num_sign_bit_copies_in_rep [in_mode][mode])
5043 	      num_sign_bit_copies_in_rep [in_mode][mode]
5044 		+= GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
5045 	  }
5046       }
5047 }
5048 
5049 /* Suppose that truncation from the machine mode of X to MODE is not a
5050    no-op.  See if there is anything special about X so that we can
5051    assume it already contains a truncated value of MODE.  */
5052 
5053 bool
5054 truncated_to_mode (enum machine_mode mode, const_rtx x)
5055 {
5056   /* This register has already been used in MODE without explicit
5057      truncation.  */
5058   if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5059     return true;
5060 
5061   /* See if we already satisfy the requirements of MODE.  If yes we
5062      can just switch to MODE.  */
5063   if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5064       && (num_sign_bit_copies (x, GET_MODE (x))
5065 	  >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5066     return true;
5067 
5068   return false;
5069 }
5070 
5071 /* Initialize non_rtx_starting_operands, which is used to speed up
5072    for_each_rtx.  */
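/* For example, PLUS has format "ee", so its first rtx operand is at index 0,
   while CONST_INT (format "w") has no rtx operands and gets -1.  */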
5073 void
5074 init_rtlanal (void)
5075 {
5076   int i;
5077   for (i = 0; i < NUM_RTX_CODE; i++)
5078     {
5079       const char *format = GET_RTX_FORMAT (i);
5080       const char *first = strpbrk (format, "eEV");
5081       non_rtx_starting_operands[i] = first ? first - format : -1;
5082     }
5083 
5084   init_num_sign_bit_copies_in_rep ();
5085 }
5086 
5087 /* Check whether this is a constant pool constant.  */
5088 bool
5089 constant_pool_constant_p (rtx x)
5090 {
5091   x = avoid_constant_pool_reference (x);
5092   return GET_CODE (x) == CONST_DOUBLE;
5093 }
5094 
5095 /* If M is a bitmask that selects a field of low-order bits within an item but
5096    not the entire word, return the length of the field.  Return -1 otherwise.
5097    M is used in machine mode MODE.  */
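/* For example, low_bitmask_len (SImode, 0xff) is 8, while 0xf0 does not
   select a low-order field and yields -1.  */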
5098 
5099 int
5100 low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
5101 {
5102   if (mode != VOIDmode)
5103     {
5104       if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
5105 	return -1;
5106       m &= GET_MODE_MASK (mode);
5107     }
5108 
5109   return exact_log2 (m + 1);
5110 }
5111 }