/* xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/recog.c
   (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)  */
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1, with UNSHARE defaulting to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1, with UNSHARE defaulting to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
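
/* A minimal usage sketch for the single-change case (illustrative only,
   not part of GCC; it assumes PATTERN (INSN) is a single SET and that
   NEW_SRC is supplied by the caller).  With IN_GROUP zero,
   validate_change applies the change and validates it immediately,
   backing it out again if the insn no longer recognizes.  */
#if 0
static bool
try_replace_src (rtx insn, rtx new_src)
{
  /* in_group == 0: the change is verified (and kept or undone) before
     validate_change returns.  */
  return validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0);
}
#endif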


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return the number of changes made and not yet validated.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (last_object);
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
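
/* A sketch of the grouped-change protocol (illustrative only, not part
   of GCC; INSN is assumed to be a single SET).  Changes queued with
   IN_GROUP nonzero are only tested when apply_change_group is called,
   and on failure the whole group is rolled back.  */
#if 0
static bool
swap_set_operands (rtx insn)
{
  rtx set = PATTERN (insn);
  rtx src = SET_SRC (set), dest = SET_DEST (set);

  /* Queue both changes as one group...  */
  validate_change (insn, &SET_DEST (set), src, 1);
  validate_change (insn, &SET_SRC (set), dest, 1);

  /* ...then validate and commit them atomically.  apply_change_group
     itself calls cancel_changes (0) if recognition fails.  */
  return apply_change_group ();
}
#endif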

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special-case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
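
/* Illustrative sketch (not part of GCC): globally replace one register
   with another inside INSN and keep the result only if the insn is
   still recognized.  FROM and TO are assumed to be REGs of the same
   mode.  validate_replace_rtx performs the substitution and calls
   apply_change_group itself, so no separate commit step is needed.  */
#if 0
static bool
rename_reg_in_insn (rtx insn, rtx from, rtx to)
{
  return validate_replace_rtx (from, to, insn) != 0;
}
#endif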

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
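
/* Sketch (illustrative only, not part of GCC): substitute FROM with TO
   in the used (source) positions of INSN while leaving SET_DESTs
   alone.  Unlike validate_replace_rtx, the _group variants only queue
   changes, so the commit is left to the caller.  */
#if 0
static bool
replace_uses_only (rtx insn, rtx from, rtx to)
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group () != 0;
}
#endif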

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
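
/* A small sanity sketch (illustrative only, not part of GCC).  These
   predicates are normally referenced from match_operand expressions in
   the machine description, but they can also be called directly.
   REGNO_FOR_TEST stands for some hypothetical pseudo register number.  */
#if 0
static void
predicate_examples (void)
{
  rtx reg = gen_rtx_REG (SImode, REGNO_FOR_TEST);
  rtx cst = GEN_INT (42);

  gcc_assert (register_operand (reg, SImode));
  /* On typical targets a small CONST_INT is a legitimate constant,
     so both rtxes also qualify as general operands.  */
  gcc_assert (general_operand (reg, SImode));
  gcc_assert (general_operand (cst, SImode));
}
#endif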

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this exception
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (CONST_INT_P (op)
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
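
/* Sketch (illustrative only, not part of GCC): build the canonical
   pre-decrement push address and check it with push_operand.  This
   assumes a target where the stack grows downward (so STACK_PUSH_CODE
   is PRE_DEC) and where PUSH_ROUNDING does not pad SImode.  */
#if 0
static void
push_operand_example (void)
{
  /* (mem:SI (pre_dec:P (reg sp))) is the STACK_PUSH_CODE form.  */
  rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  rtx mem = gen_rtx_MEM (SImode, addr);

  gcc_assert (push_operand (mem, SImode));
}
#endif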

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
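
/* Usage sketch (illustrative only, not part of GCC): pull the operands
   and constraints out of an asm body, sizing the buffers with
   asm_noperands, much as check_asm_operands does above.  BODY is
   assumed to be an insn body that uses ASM_OPERANDS.  */
#if 0
static void
walk_asm_constraints (rtx body)
{
  int n = asm_noperands (body);

  if (n > 0)
    {
      rtx *ops = XALLOCAVEC (rtx, n);
      const char **cons = XALLOCAVEC (const char *, n);
      const char *templ
	= decode_asm_operands (body, ops, NULL, cons, NULL, NULL);

      /* TEMPL is the assembler template; OPS[i] and CONS[i] are
	 operand I and its constraint string.  */
    }
}
#endif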
1597 
1598 /* Check if an asm_operand matches its constraints.
1599    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1600 
1601 int
1602 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1603 {
1604   int result = 0;
1605 
1606   /* Use constrain_operands after reload.  */
1607   gcc_assert (!reload_completed);
1608 
1609   /* Empty constraint string is the same as "X,...,X", i.e. X for as
1610      many alternatives as required to match the other operands.  */
1611   if (*constraint == '\0')
1612     return 1;
1613 
1614   while (*constraint)
1615     {
1616       char c = *constraint;
1617       int len;
1618       switch (c)
1619 	{
1620 	case ',':
1621 	  constraint++;
1622 	  continue;
1623 	case '=':
1624 	case '+':
1625 	case '*':
1626 	case '%':
1627 	case '!':
1628 	case '#':
1629 	case '&':
1630 	case '?':
1631 	  break;
1632 
1633 	case '0': case '1': case '2': case '3': case '4':
1634 	case '5': case '6': case '7': case '8': case '9':
1635 	  /* If caller provided constraints pointer, look up
1636 	     the maching constraint.  Otherwise, our caller should have
1637 	     given us the proper matching constraint, but we can't
1638 	     actually fail the check if they didn't.  Indicate that
1639 	     results are inconclusive.  */
1640 	  if (constraints)
1641 	    {
1642 	      char *end;
1643 	      unsigned long match;
1644 
1645 	      match = strtoul (constraint, &end, 10);
1646 	      if (!result)
1647 		result = asm_operand_ok (op, constraints[match], NULL);
1648 	      constraint = (const char *) end;
1649 	    }
1650 	  else
1651 	    {
1652 	      do
1653 		constraint++;
1654 	      while (ISDIGIT (*constraint));
1655 	      if (! result)
1656 		result = -1;
1657 	    }
1658 	  continue;
1659 
1660 	case 'p':
1661 	  if (address_operand (op, VOIDmode))
1662 	    result = 1;
1663 	  break;
1664 
1665 	case TARGET_MEM_CONSTRAINT:
1666 	case 'V': /* non-offsettable */
1667 	  if (memory_operand (op, VOIDmode))
1668 	    result = 1;
1669 	  break;
1670 
1671 	case 'o': /* offsettable */
1672 	  if (offsettable_nonstrict_memref_p (op))
1673 	    result = 1;
1674 	  break;
1675 
1676 	case '<':
1677 	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1678 	     excepting those that expand_call created.  Further, on some
1679 	     machines which do not have generalized auto inc/dec, an inc/dec
1680 	     is not a memory_operand.
1681 
1682 	     Match any memory and hope things are resolved after reload.  */
1683 
1684 	  if (MEM_P (op)
1685 	      && (1
1686 		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
1687 		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
1688 	    result = 1;
1689 	  break;
1690 
1691 	case '>':
1692 	  if (MEM_P (op)
1693 	      && (1
1694 		  || GET_CODE (XEXP (op, 0)) == PRE_INC
1695 		  || GET_CODE (XEXP (op, 0)) == POST_INC))
1696 	    result = 1;
1697 	  break;
1698 
1699 	case 'E':
1700 	case 'F':
1701 	  if (GET_CODE (op) == CONST_DOUBLE
1702 	      || (GET_CODE (op) == CONST_VECTOR
1703 		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1704 	    result = 1;
1705 	  break;
1706 
1707 	case 'G':
1708 	  if (GET_CODE (op) == CONST_DOUBLE
1709 	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1710 	    result = 1;
1711 	  break;
1712 	case 'H':
1713 	  if (GET_CODE (op) == CONST_DOUBLE
1714 	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1715 	    result = 1;
1716 	  break;
1717 
1718 	case 's':
1719 	  if (CONST_INT_P (op)
1720 	      || (GET_CODE (op) == CONST_DOUBLE
1721 		  && GET_MODE (op) == VOIDmode))
1722 	    break;
1723 	  /* Fall through.  */
1724 
1725 	case 'i':
1726 	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1727 	    result = 1;
1728 	  break;
1729 
1730 	case 'n':
1731 	  if (CONST_INT_P (op)
1732 	      || (GET_CODE (op) == CONST_DOUBLE
1733 		  && GET_MODE (op) == VOIDmode))
1734 	    result = 1;
1735 	  break;
1736 
1737 	case 'I':
1738 	  if (CONST_INT_P (op)
1739 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1740 	    result = 1;
1741 	  break;
1742 	case 'J':
1743 	  if (CONST_INT_P (op)
1744 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1745 	    result = 1;
1746 	  break;
1747 	case 'K':
1748 	  if (CONST_INT_P (op)
1749 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1750 	    result = 1;
1751 	  break;
1752 	case 'L':
1753 	  if (CONST_INT_P (op)
1754 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1755 	    result = 1;
1756 	  break;
1757 	case 'M':
1758 	  if (CONST_INT_P (op)
1759 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1760 	    result = 1;
1761 	  break;
1762 	case 'N':
1763 	  if (CONST_INT_P (op)
1764 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1765 	    result = 1;
1766 	  break;
1767 	case 'O':
1768 	  if (CONST_INT_P (op)
1769 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1770 	    result = 1;
1771 	  break;
1772 	case 'P':
1773 	  if (CONST_INT_P (op)
1774 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1775 	    result = 1;
1776 	  break;
1777 
1778 	case 'X':
1779 	  result = 1;
1780 	  break;
1781 
1782 	case 'g':
1783 	  if (general_operand (op, VOIDmode))
1784 	    result = 1;
1785 	  break;
1786 
1787 	default:
1788 	  /* For all other letters, we first check for a register class,
1789 	     otherwise it is an EXTRA_CONSTRAINT.  */
1790 	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1791 	    {
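	    /* Note that this `case' label deliberately sits inside the
	       `if' block: for the 'r' constraint, control jumps straight
	       in here, bypassing the REG_CLASS_FROM_CONSTRAINT test.  */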
1792 	    case 'r':
1793 	      if (GET_MODE (op) == BLKmode)
1794 		break;
1795 	      if (register_operand (op, VOIDmode))
1796 		result = 1;
1797 	    }
1798 #ifdef EXTRA_CONSTRAINT_STR
1799 	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1800 	    /* Every memory operand can be reloaded to fit.  */
1801 	    result = result || memory_operand (op, VOIDmode);
1802 	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1803 	    /* Every address operand can be reloaded to fit.  */
1804 	    result = result || address_operand (op, VOIDmode);
1805 	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1806 	    result = 1;
1807 #endif
1808 	  break;
1809 	}
1810       len = CONSTRAINT_LEN (c, constraint);
1811       do
1812 	constraint++;
1813       while (--len && *constraint);
1814       if (len)
1815 	return 0;
1816     }
1817 
1818   return result;
1819 }
1820 
1821 /* Given an rtx *P, if it is a sum containing an integer constant term,
1822    return the location (type rtx *) of the pointer to that constant term.
1823    Otherwise, return a null pointer.  */
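
/* For illustration (a sketch, not part of the interface): given

       (plus:SI (reg:SI 100) (const_int 4))

   the REG contains no constant term, so the recursion below returns
   &XEXP (*p, 1), the location of the pointer to the (const_int 4).  */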
1824 
1825 rtx *
1826 find_constant_term_loc (rtx *p)
1827 {
1828   rtx *tem;
1829   enum rtx_code code = GET_CODE (*p);
1830 
1831   /* If *P IS such a constant term, P is its location.  */
1832 
1833   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1834       || code == CONST)
1835     return p;
1836 
1837   /* Otherwise, if not a sum, it has no constant term.  */
1838 
1839   if (GET_CODE (*p) != PLUS)
1840     return 0;
1841 
1842   /* If both summands are constant, return the location of the whole sum.  */
1843 
1844   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1845       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1846     return p;
1847 
1848   /* Otherwise, check each summand for containing a constant term.  */
1849 
1850   if (XEXP (*p, 0) != 0)
1851     {
1852       tem = find_constant_term_loc (&XEXP (*p, 0));
1853       if (tem != 0)
1854 	return tem;
1855     }
1856 
1857   if (XEXP (*p, 1) != 0)
1858     {
1859       tem = find_constant_term_loc (&XEXP (*p, 1));
1860       if (tem != 0)
1861 	return tem;
1862     }
1863 
1864   return 0;
1865 }
1866 
1867 /* Return 1 if OP is a memory reference
1868    whose address contains no side effects
1869    and remains valid after the addition
1870    of a positive integer less than the
1871    size of the object being referenced.
1872 
1873    We assume that the original address is valid and do not check it.
1874 
1875    This uses strict_memory_address_p as a subroutine, so
1876    don't use it before reload.  */
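
/* Illustrative examples (assuming conventional addressing rules):
   (mem:SI (plus:SI (reg:SI 3) (const_int 16))) is typically
   offsettable, since (plus:SI (reg:SI 3) (const_int 19)) is still a
   valid QImode address, whereas (mem:SI (post_inc:SI (reg:SI 3))) is
   not, because adding a displacement to an auto-increment address is
   meaningless.  */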
1877 
1878 int
1879 offsettable_memref_p (rtx op)
1880 {
1881   return ((MEM_P (op))
1882 	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1883 					       MEM_ADDR_SPACE (op)));
1884 }
1885 
1886 /* Similar, but don't require a strictly valid mem ref:
1887    consider pseudo-regs valid as index or base regs.  */
1888 
1889 int
1890 offsettable_nonstrict_memref_p (rtx op)
1891 {
1892   return ((MEM_P (op))
1893 	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1894 					       MEM_ADDR_SPACE (op)));
1895 }
1896 
1897 /* Return 1 if Y is a memory address which contains no side effects
1898    and would remain valid for address space AS after the addition of
1899    a positive integer less than the size of MODE.
1900 
1901    We assume that the original address is valid and do not check it.
1902    We do check that it is valid for narrower modes.
1903 
1904    If STRICTP is nonzero, we require a strictly valid address,
1905    for the sake of use in reload.c.  */
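
/* A sketch of the strategy used below: for an SImode reference
   (mode_sz == 4 with 8-bit units) to the address (plus (reg)
   (const_int 20)), substitute (plus (reg) (const_int 23)), i.e. the
   original offset plus mode_sz - 1, and ask whether that is still a
   valid QImode address.  If it is, every byte of the object can be
   addressed.  */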
1906 
1907 int
1908 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1909 				  addr_space_t as)
1910 {
1911   enum rtx_code ycode = GET_CODE (y);
1912   rtx z;
1913   rtx y1 = y;
1914   rtx *y2;
1915   int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1916     (strictp ? strict_memory_address_addr_space_p
1917 	     : memory_address_addr_space_p);
1918   unsigned int mode_sz = GET_MODE_SIZE (mode);
1919 
1920   if (CONSTANT_ADDRESS_P (y))
1921     return 1;
1922 
1923   /* Adjusting an offsettable address involves changing to a narrower mode.
1924      Make sure that's OK.  */
1925 
1926   if (mode_dependent_address_p (y))
1927     return 0;
1928 
1929   /* ??? How much offset does an offsettable BLKmode reference need?
1930      Clearly that depends on the situation in which it's being used.
1931      However, the current situation in which we test 0xffffffff is
1932      less than ideal.  Caveat user.  */
1933   if (mode_sz == 0)
1934     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1935 
1936   /* If the expression contains a constant term,
1937      see if it remains valid when max possible offset is added.  */
1938 
1939   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1940     {
1941       int good;
1942 
1943       y1 = *y2;
1944       *y2 = plus_constant (*y2, mode_sz - 1);
1945       /* Use QImode because an odd displacement may be automatically invalid
1946 	 for any wider mode.  But it should be valid for a single byte.  */
1947       good = (*addressp) (QImode, y, as);
1948 
1949       /* In any case, restore old contents of memory.  */
1950       *y2 = y1;
1951       return good;
1952     }
1953 
1954   if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1955     return 0;
1956 
1957   /* The offset added here is chosen as the maximum offset that
1958      any instruction could need to add when operating on something
1959      of the specified mode.  We assume that if Y and Y+c are
1960      valid addresses then so is Y+d for all 0<d<c.  adjust_address will
1961      go inside a LO_SUM here, so we do so as well.  */
1962   if (GET_CODE (y) == LO_SUM
1963       && mode != BLKmode
1964       && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1965     z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1966 			plus_constant (XEXP (y, 1), mode_sz - 1));
1967   else
1968     z = plus_constant (y, mode_sz - 1);
1969 
1970   /* Use QImode because an odd displacement may be automatically invalid
1971      for any wider mode.  But it should be valid for a single byte.  */
1972   return (*addressp) (QImode, z, as);
1973 }
1974 
1975 /* Return 1 if ADDR is an address-expression whose effect depends
1976    on the mode of the memory reference it is used in.
1977 
1978    Autoincrement addressing is a typical example of mode-dependence
1979    because the amount of the increment depends on the mode.  */
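
/* For example, (post_inc:SI (reg:SI 3)) advances the register by 4
   when used to access an SImode object but by 8 for DImode (with
   8-bit units), so the address is mode-dependent; a plain
   (plus (reg) (const_int 4)) is not.  */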
1980 
1981 int
1982 mode_dependent_address_p (rtx addr)
1983 {
1984   /* Auto-increment addressing with anything other than post_modify
1985      or pre_modify always introduces a mode dependency.  Catch such
1986      cases now instead of deferring to the target.  */
1987   if (GET_CODE (addr) == PRE_INC
1988       || GET_CODE (addr) == POST_INC
1989       || GET_CODE (addr) == PRE_DEC
1990       || GET_CODE (addr) == POST_DEC)
1991     return 1;
1992 
1993   GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1994   return 0;
1995   /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
1996  win: ATTRIBUTE_UNUSED_LABEL
1997   return 1;
1998 }
1999 
2000 /* Like extract_insn, but remember the insn extracted and do not extract
2001    again when called for the same insn, on the assumption that recog_data
2002    still contains valid information.  This is used primarily by the
2003    gen_attr infrastructure, which extracts the same insn over and over.  */
2004 void
2005 extract_insn_cached (rtx insn)
2006 {
2007   if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2008     return;
2009   extract_insn (insn);
2010   recog_data.insn = insn;
2011 }
2012 
2013 /* Do cached extract_insn, constrain_operands and complain about failures.
2014    Used by insn_attrtab.  */
2015 void
2016 extract_constrain_insn_cached (rtx insn)
2017 {
2018   extract_insn_cached (insn);
2019   if (which_alternative == -1
2020       && !constrain_operands (reload_completed))
2021     fatal_insn_not_found (insn);
2022 }
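
/* A minimal usage sketch (hypothetical caller): attribute code
   typically does

       extract_constrain_insn_cached (insn);
       ... use recog_data.operand[i] ...

   relying on the cache to make repeated calls for the same insn
   cheap.  */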
2023 
2024 /* Do cached constrain_operands and complain about failures.  */
2025 int
2026 constrain_operands_cached (int strict)
2027 {
2028   if (which_alternative == -1)
2029     return constrain_operands (strict);
2030   else
2031     return 1;
2032 }
2033 
2034 /* Analyze INSN and fill in recog_data.  */
2035 
2036 void
2037 extract_insn (rtx insn)
2038 {
2039   int i;
2040   int icode;
2041   int noperands;
2042   rtx body = PATTERN (insn);
2043 
2044   recog_data.n_operands = 0;
2045   recog_data.n_alternatives = 0;
2046   recog_data.n_dups = 0;
2047 
2048   switch (GET_CODE (body))
2049     {
2050     case USE:
2051     case CLOBBER:
2052     case ASM_INPUT:
2053     case ADDR_VEC:
2054     case ADDR_DIFF_VEC:
2055     case VAR_LOCATION:
2056       return;
2057 
2058     case SET:
2059       if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2060 	goto asm_insn;
2061       else
2062 	goto normal_insn;
2063     case PARALLEL:
2064       if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2065 	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2066 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2067 	goto asm_insn;
2068       else
2069 	goto normal_insn;
2070     case ASM_OPERANDS:
2071     asm_insn:
2072       recog_data.n_operands = noperands = asm_noperands (body);
2073       if (noperands >= 0)
2074 	{
2075 	  /* This insn is an `asm' with operands.  */
2076 
2077 	  /* expand_asm_operands makes sure there aren't too many operands.  */
2078 	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2079 
2080 	  /* Now get the operand values and constraints out of the insn.  */
2081 	  decode_asm_operands (body, recog_data.operand,
2082 			       recog_data.operand_loc,
2083 			       recog_data.constraints,
2084 			       recog_data.operand_mode, NULL);
2085 	  if (noperands > 0)
2086 	    {
2087 	      const char *p = recog_data.constraints[0];
2088 	      recog_data.n_alternatives = 1;
2089 	      while (*p)
2090 		recog_data.n_alternatives += (*p++ == ',');
2091 	    }
2092 	  break;
2093 	}
2094       fatal_insn_not_found (insn);
2095 
2096     default:
2097     normal_insn:
2098       /* Ordinary insn: recognize it, get the operands via insn_extract
2099 	 and get the constraints.  */
2100 
2101       icode = recog_memoized (insn);
2102       if (icode < 0)
2103 	fatal_insn_not_found (insn);
2104 
2105       recog_data.n_operands = noperands = insn_data[icode].n_operands;
2106       recog_data.n_alternatives = insn_data[icode].n_alternatives;
2107       recog_data.n_dups = insn_data[icode].n_dups;
2108 
2109       insn_extract (insn);
2110 
2111       for (i = 0; i < noperands; i++)
2112 	{
2113 	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2114 	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2115 	  /* VOIDmode match_operands get their mode from the real operand.  */
2116 	  if (recog_data.operand_mode[i] == VOIDmode)
2117 	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2118 	}
2119     }
2120   for (i = 0; i < noperands; i++)
2121     recog_data.operand_type[i]
2122       = (recog_data.constraints[i][0] == '=' ? OP_OUT
2123 	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2124 	 : OP_IN);
2125 
2126   gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2127 
2128   if (INSN_CODE (insn) < 0)
2129     for (i = 0; i < recog_data.n_alternatives; i++)
2130       recog_data.alternative_enabled_p[i] = true;
2131   else
2132     {
2133       recog_data.insn = insn;
2134       for (i = 0; i < recog_data.n_alternatives; i++)
2135 	{
2136 	  which_alternative = i;
2137 	  recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2138 	}
2139     }
2140 
2141   recog_data.insn = NULL;
2142   which_alternative = -1;
2143 }
2144 
2145 /* After calling extract_insn, you can use this function to extract some
2146    information from the constraint strings into a more usable form.
2147    The collected data is stored in recog_op_alt.  */
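
/* For illustration (a rough sketch, assuming TARGET_MEM_CONSTRAINT is
   the default 'm'): with constraint "=r,m" for operand 0, this records
   recog_op_alt[0][0].cl == GENERAL_REGS for alternative 0 and
   recog_op_alt[0][1].memory_ok == 1 for alternative 1.  */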
2148 void
2149 preprocess_constraints (void)
2150 {
2151   int i;
2152 
2153   for (i = 0; i < recog_data.n_operands; i++)
2154     memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2155 				 * sizeof (struct operand_alternative)));
2156 
2157   for (i = 0; i < recog_data.n_operands; i++)
2158     {
2159       int j;
2160       struct operand_alternative *op_alt;
2161       const char *p = recog_data.constraints[i];
2162 
2163       op_alt = recog_op_alt[i];
2164 
2165       for (j = 0; j < recog_data.n_alternatives; j++)
2166 	{
2167 	  op_alt[j].cl = NO_REGS;
2168 	  op_alt[j].constraint = p;
2169 	  op_alt[j].matches = -1;
2170 	  op_alt[j].matched = -1;
2171 
2172 	  if (!recog_data.alternative_enabled_p[j])
2173 	    {
2174 	      p = skip_alternative (p);
2175 	      continue;
2176 	    }
2177 
2178 	  if (*p == '\0' || *p == ',')
2179 	    {
2180 	      op_alt[j].anything_ok = 1;
2181 	      continue;
2182 	    }
2183 
2184 	  for (;;)
2185 	    {
2186 	      char c = *p;
2187 	      if (c == '#')
2188 		do
2189 		  c = *++p;
2190 		while (c != ',' && c != '\0');
2191 	      if (c == ',' || c == '\0')
2192 		{
2193 		  p++;
2194 		  break;
2195 		}
2196 
2197 	      switch (c)
2198 		{
2199 		case '=': case '+': case '*': case '%':
2200 		case 'E': case 'F': case 'G': case 'H':
2201 		case 's': case 'i': case 'n':
2202 		case 'I': case 'J': case 'K': case 'L':
2203 		case 'M': case 'N': case 'O': case 'P':
2204 		  /* These don't say anything we care about.  */
2205 		  break;
2206 
2207 		case '?':
2208 		  op_alt[j].reject += 6;
2209 		  break;
2210 		case '!':
2211 		  op_alt[j].reject += 600;
2212 		  break;
2213 		case '&':
2214 		  op_alt[j].earlyclobber = 1;
2215 		  break;
2216 
2217 		case '0': case '1': case '2': case '3': case '4':
2218 		case '5': case '6': case '7': case '8': case '9':
2219 		  {
2220 		    char *end;
2221 		    op_alt[j].matches = strtoul (p, &end, 10);
2222 		    recog_op_alt[op_alt[j].matches][j].matched = i;
2223 		    p = end;
2224 		  }
2225 		  continue;
2226 
2227 		case TARGET_MEM_CONSTRAINT:
2228 		  op_alt[j].memory_ok = 1;
2229 		  break;
2230 		case '<':
2231 		  op_alt[j].decmem_ok = 1;
2232 		  break;
2233 		case '>':
2234 		  op_alt[j].incmem_ok = 1;
2235 		  break;
2236 		case 'V':
2237 		  op_alt[j].nonoffmem_ok = 1;
2238 		  break;
2239 		case 'o':
2240 		  op_alt[j].offmem_ok = 1;
2241 		  break;
2242 		case 'X':
2243 		  op_alt[j].anything_ok = 1;
2244 		  break;
2245 
2246 		case 'p':
2247 		  op_alt[j].is_address = 1;
2248 		  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2249 		      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2250 		  break;
2251 
2252 		case 'g':
2253 		case 'r':
2254 		  op_alt[j].cl =
2255 		   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2256 		  break;
2257 
2258 		default:
2259 		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
2260 		    {
2261 		      op_alt[j].memory_ok = 1;
2262 		      break;
2263 		    }
2264 		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2265 		    {
2266 		      op_alt[j].is_address = 1;
2267 		      op_alt[j].cl
2268 			= (reg_class_subunion
2269 			   [(int) op_alt[j].cl]
2270 			   [(int) base_reg_class (VOIDmode, ADDRESS,
2271 						  SCRATCH)]);
2272 		      break;
2273 		    }
2274 
2275 		  op_alt[j].cl
2276 		    = (reg_class_subunion
2277 		       [(int) op_alt[j].cl]
2278 		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2279 		  break;
2280 		}
2281 	      p += CONSTRAINT_LEN (c, p);
2282 	    }
2283 	}
2284     }
2285 }
2286 
2287 /* Check the operands of an insn against the insn's operand constraints
2288    and return 1 if they are valid.
2289    The information about the insn's operands, constraints, operand modes
2290    etc. is obtained from the global variables set up by extract_insn.
2291 
2292    WHICH_ALTERNATIVE is set to a number which indicates which
2293    alternative of constraints was matched: 0 for the first alternative,
2294    1 for the next, etc.
2295 
2296    In addition, when two operands are required to match
2297    and it happens that the output operand is (reg) while the
2298    input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2299    make the output operand look like the input.
2300    This is because the output operand is the one the template will print.
2301 
2302    This is used in final, just before printing the assembler code and by
2303    the routines that determine an insn's attribute.
2304 
2305    If STRICT is a positive nonzero value, it means that we have been
2306    called after reload has been completed.  In that case, we must
2307    do all checks strictly.  If it is zero, it means that we have been called
2308    before reload has completed.  In that case, we first try to see if we can
2309    find an alternative that matches strictly.  If not, we try again, this
2310    time assuming that reload will fix up the insn.  This provides a "best
2311    guess" for the alternative and is used to compute attributes of insns prior
2312    to reload.  A negative value of STRICT is used for this internal call.  */
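
/* Illustrative summary of STRICT (a sketch, not an exhaustive
   contract):

     constrain_operands (1)    post-reload callers such as final;
                               all checks are strict.
     constrain_operands (0)    pre-reload; try a strict match first.
     constrain_operands (-1)   internal retry; assume reload will fix
                               up whatever does not yet match.  */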
2313 
2314 struct funny_match
2315 {
2316   int this_op, other;
2317 };
2318 
2319 int
2320 constrain_operands (int strict)
2321 {
2322   const char *constraints[MAX_RECOG_OPERANDS];
2323   int matching_operands[MAX_RECOG_OPERANDS];
2324   int earlyclobber[MAX_RECOG_OPERANDS];
2325   int c;
2326 
2327   struct funny_match funny_match[MAX_RECOG_OPERANDS];
2328   int funny_match_index;
2329 
2330   which_alternative = 0;
2331   if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2332     return 1;
2333 
2334   for (c = 0; c < recog_data.n_operands; c++)
2335     {
2336       constraints[c] = recog_data.constraints[c];
2337       matching_operands[c] = -1;
2338     }
2339 
2340   do
2341     {
2342       int seen_earlyclobber_at = -1;
2343       int opno;
2344       int lose = 0;
2345       funny_match_index = 0;
2346 
2347       if (!recog_data.alternative_enabled_p[which_alternative])
2348 	{
2349 	  int i;
2350 
2351 	  for (i = 0; i < recog_data.n_operands; i++)
2352 	    constraints[i] = skip_alternative (constraints[i]);
2353 
2354 	  which_alternative++;
2355 	  continue;
2356 	}
2357 
2358       for (opno = 0; opno < recog_data.n_operands; opno++)
2359 	{
2360 	  rtx op = recog_data.operand[opno];
2361 	  enum machine_mode mode = GET_MODE (op);
2362 	  const char *p = constraints[opno];
2363 	  int offset = 0;
2364 	  int win = 0;
2365 	  int val;
2366 	  int len;
2367 
2368 	  earlyclobber[opno] = 0;
2369 
2370 	  /* A unary operator may be accepted by the predicate, but it
2371 	     is irrelevant for matching constraints.  */
2372 	  if (UNARY_P (op))
2373 	    op = XEXP (op, 0);
2374 
2375 	  if (GET_CODE (op) == SUBREG)
2376 	    {
2377 	      if (REG_P (SUBREG_REG (op))
2378 		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2379 		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2380 					      GET_MODE (SUBREG_REG (op)),
2381 					      SUBREG_BYTE (op),
2382 					      GET_MODE (op));
2383 	      op = SUBREG_REG (op);
2384 	    }
2385 
2386 	  /* An empty constraint or empty alternative
2387 	     allows anything which matched the pattern.  */
2388 	  if (*p == 0 || *p == ',')
2389 	    win = 1;
2390 
2391 	  do
2392 	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2393 	      {
2394 	      case '\0':
2395 		len = 0;
2396 		break;
2397 	      case ',':
2398 		c = '\0';
2399 		break;
2400 
2401 	      case '?':  case '!': case '*':  case '%':
2402 	      case '=':  case '+':
2403 		break;
2404 
2405 	      case '#':
2406 		/* Ignore rest of this alternative as far as
2407 		   constraint checking is concerned.  */
2408 		do
2409 		  p++;
2410 		while (*p && *p != ',');
2411 		len = 0;
2412 		break;
2413 
2414 	      case '&':
2415 		earlyclobber[opno] = 1;
2416 		if (seen_earlyclobber_at < 0)
2417 		  seen_earlyclobber_at = opno;
2418 		break;
2419 
2420 	      case '0':  case '1':  case '2':  case '3':  case '4':
2421 	      case '5':  case '6':  case '7':  case '8':  case '9':
2422 		{
2423 		  /* This operand must be the same as a previous one.
2424 		     This kind of constraint is used for instructions such
2425 		     as add when they take only two operands.
2426 
2427 		     Note that the lower-numbered operand is passed first.
2428 
2429 		     If we are not testing strictly, assume that this
2430 		     constraint will be satisfied.  */
2431 
2432 		  char *end;
2433 		  int match;
2434 
2435 		  match = strtoul (p, &end, 10);
2436 		  p = end;
2437 
2438 		  if (strict < 0)
2439 		    val = 1;
2440 		  else
2441 		    {
2442 		      rtx op1 = recog_data.operand[match];
2443 		      rtx op2 = recog_data.operand[opno];
2444 
2445 		      /* A unary operator may be accepted by the predicate,
2446 			 but it is irrelevant for matching constraints.  */
2447 		      if (UNARY_P (op1))
2448 			op1 = XEXP (op1, 0);
2449 		      if (UNARY_P (op2))
2450 			op2 = XEXP (op2, 0);
2451 
2452 		      val = operands_match_p (op1, op2);
2453 		    }
2454 
2455 		  matching_operands[opno] = match;
2456 		  matching_operands[match] = opno;
2457 
2458 		  if (val != 0)
2459 		    win = 1;
2460 
2461 		  /* If output is *x and input is *--x, arrange later
2462 		     to change the output to *--x as well, since the
2463 		     output op is the one that will be printed.  */
2464 		  if (val == 2 && strict > 0)
2465 		    {
2466 		      funny_match[funny_match_index].this_op = opno;
2467 		      funny_match[funny_match_index++].other = match;
2468 		    }
2469 		}
2470 		len = 0;
2471 		break;
2472 
2473 	      case 'p':
2474 		/* p is used for address_operands.  When we are called by
2475 		   gen_reload, no one will have checked that the address is
2476 		   strictly valid, i.e., that all pseudos requiring hard regs
2477 		   have gotten them.  */
2478 		if (strict <= 0
2479 		    || (strict_memory_address_p (recog_data.operand_mode[opno],
2480 						 op)))
2481 		  win = 1;
2482 		break;
2483 
2484 		/* No need to check general_operand again;
2485 		   it was done in insn-recog.c.  Well, except that reload
2486 		   doesn't check the validity of its replacements, but
2487 		   that should only matter when there's a bug.  */
2488 	      case 'g':
2489 		/* Anything goes unless it is a REG and really has a hard reg
2490 		   but the hard reg is not in the class GENERAL_REGS.  */
2491 		if (REG_P (op))
2492 		  {
2493 		    if (strict < 0
2494 			|| GENERAL_REGS == ALL_REGS
2495 			|| (reload_in_progress
2496 			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2497 			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2498 		      win = 1;
2499 		  }
2500 		else if (strict < 0 || general_operand (op, mode))
2501 		  win = 1;
2502 		break;
2503 
2504 	      case 'X':
2505 		/* This is used for a MATCH_SCRATCH in the cases when
2506 		   we don't actually need anything.  So anything goes
2507 		   any time.  */
2508 		win = 1;
2509 		break;
2510 
2511 	      case TARGET_MEM_CONSTRAINT:
2512 		/* Memory operands must be valid, to the extent
2513 		   required by STRICT.  */
2514 		if (MEM_P (op))
2515 		  {
2516 		    if (strict > 0
2517 			&& !strict_memory_address_addr_space_p
2518 			     (GET_MODE (op), XEXP (op, 0),
2519 			      MEM_ADDR_SPACE (op)))
2520 		      break;
2521 		    if (strict == 0
2522 			&& !memory_address_addr_space_p
2523 			     (GET_MODE (op), XEXP (op, 0),
2524 			      MEM_ADDR_SPACE (op)))
2525 		      break;
2526 		    win = 1;
2527 		  }
2528 		/* Before reload, accept what reload can turn into mem.  */
2529 		else if (strict < 0 && CONSTANT_P (op))
2530 		  win = 1;
2531 		/* During reload, accept a pseudo.  */
2532 		else if (reload_in_progress && REG_P (op)
2533 			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2534 		  win = 1;
2535 		break;
2536 
2537 	      case '<':
2538 		if (MEM_P (op)
2539 		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2540 			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
2541 		  win = 1;
2542 		break;
2543 
2544 	      case '>':
2545 		if (MEM_P (op)
2546 		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
2547 			|| GET_CODE (XEXP (op, 0)) == POST_INC))
2548 		  win = 1;
2549 		break;
2550 
2551 	      case 'E':
2552 	      case 'F':
2553 		if (GET_CODE (op) == CONST_DOUBLE
2554 		    || (GET_CODE (op) == CONST_VECTOR
2555 			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2556 		  win = 1;
2557 		break;
2558 
2559 	      case 'G':
2560 	      case 'H':
2561 		if (GET_CODE (op) == CONST_DOUBLE
2562 		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2563 		  win = 1;
2564 		break;
2565 
2566 	      case 's':
2567 		if (CONST_INT_P (op)
2568 		    || (GET_CODE (op) == CONST_DOUBLE
2569 			&& GET_MODE (op) == VOIDmode))
2570 		  break;
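		  /* Fall through.  */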
2571 	      case 'i':
2572 		if (CONSTANT_P (op))
2573 		  win = 1;
2574 		break;
2575 
2576 	      case 'n':
2577 		if (CONST_INT_P (op)
2578 		    || (GET_CODE (op) == CONST_DOUBLE
2579 			&& GET_MODE (op) == VOIDmode))
2580 		  win = 1;
2581 		break;
2582 
2583 	      case 'I':
2584 	      case 'J':
2585 	      case 'K':
2586 	      case 'L':
2587 	      case 'M':
2588 	      case 'N':
2589 	      case 'O':
2590 	      case 'P':
2591 		if (CONST_INT_P (op)
2592 		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2593 		  win = 1;
2594 		break;
2595 
2596 	      case 'V':
2597 		if (MEM_P (op)
2598 		    && ((strict > 0 && ! offsettable_memref_p (op))
2599 			|| (strict < 0
2600 			    && !(CONSTANT_P (op) || MEM_P (op)))
2601 			|| (reload_in_progress
2602 			    && !(REG_P (op)
2603 				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2604 		  win = 1;
2605 		break;
2606 
2607 	      case 'o':
2608 		if ((strict > 0 && offsettable_memref_p (op))
2609 		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
2610 		    /* Before reload, accept what reload can handle.  */
2611 		    || (strict < 0
2612 			&& (CONSTANT_P (op) || MEM_P (op)))
2613 		    /* During reload, accept a pseudo.  */
2614 		    || (reload_in_progress && REG_P (op)
2615 			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
2616 		  win = 1;
2617 		break;
2618 
2619 	      default:
2620 		{
2621 		  enum reg_class cl;
2622 
2623 		  cl = (c == 'r'
2624 			   ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2625 		  if (cl != NO_REGS)
2626 		    {
2627 		      if (strict < 0
2628 			  || (strict == 0
2629 			      && REG_P (op)
2630 			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2631 			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2632 			  || (REG_P (op)
2633 			      && reg_fits_class_p (op, cl, offset, mode)))
2634 		        win = 1;
2635 		    }
2636 #ifdef EXTRA_CONSTRAINT_STR
2637 		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
2638 		    win = 1;
2639 
2640 		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2641 			   /* Every memory operand can be reloaded to fit.  */
2642 			   && ((strict < 0 && MEM_P (op))
2643 			       /* Before reload, accept what reload can turn
2644 				  into mem.  */
2645 			       || (strict < 0 && CONSTANT_P (op))
2646 			       /* During reload, accept a pseudo.  */
2647 			       || (reload_in_progress && REG_P (op)
2648 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2649 		    win = 1;
2650 		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2651 			   /* Every address operand can be reloaded to fit.  */
2652 			   && strict < 0)
2653 		    win = 1;
2654 #endif
2655 		  break;
2656 		}
2657 	      }
2658 	  while (p += len, c);
2659 
2660 	  constraints[opno] = p;
2661 	  /* If this operand did not win somehow,
2662 	     this alternative loses.  */
2663 	  if (! win)
2664 	    lose = 1;
2665 	}
2666       /* This alternative won; the operands are ok.
2667 	 Change whichever operands this alternative says to change.  */
2668       if (! lose)
2669 	{
2670 	  int opno, eopno;
2671 
2672 	  /* See if any earlyclobber operand conflicts with some other
2673 	     operand.  */
2674 
2675 	  if (strict > 0  && seen_earlyclobber_at >= 0)
2676 	    for (eopno = seen_earlyclobber_at;
2677 		 eopno < recog_data.n_operands;
2678 		 eopno++)
2679 	      /* Ignore earlyclobber operands now in memory,
2680 		 because we would often report failure when we have
2681 		 two memory operands, one of which was formerly a REG.  */
2682 	      if (earlyclobber[eopno]
2683 		  && REG_P (recog_data.operand[eopno]))
2684 		for (opno = 0; opno < recog_data.n_operands; opno++)
2685 		  if ((MEM_P (recog_data.operand[opno])
2686 		       || recog_data.operand_type[opno] != OP_OUT)
2687 		      && opno != eopno
2688 		      /* Ignore things like match_operator operands.  */
2689 		      && *recog_data.constraints[opno] != 0
2690 		      && ! (matching_operands[opno] == eopno
2691 			    && operands_match_p (recog_data.operand[opno],
2692 						 recog_data.operand[eopno]))
2693 		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2694 						   recog_data.operand[eopno]))
2695 		    lose = 1;
2696 
2697 	  if (! lose)
2698 	    {
2699 	      while (--funny_match_index >= 0)
2700 		{
2701 		  recog_data.operand[funny_match[funny_match_index].other]
2702 		    = recog_data.operand[funny_match[funny_match_index].this_op];
2703 		}
2704 
2705 	      return 1;
2706 	    }
2707 	}
2708 
2709       which_alternative++;
2710     }
2711   while (which_alternative < recog_data.n_alternatives);
2712 
2713   which_alternative = -1;
2714   /* If we are about to reject this, but we are not to test strictly,
2715      try a very loose test.  Return failure only if that fails too.  */
2716   if (strict == 0)
2717     return constrain_operands (-1);
2718   else
2719     return 0;
2720 }
2721 
2722 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2723    is a hard reg in class CLASS when its regno is offset by OFFSET
2724    and changed to mode MODE.
2725    If REG occupies multiple hard regs, all of them must be in CLASS.  */
2726 
2727 int
2728 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2729 		  enum machine_mode mode)
2730 {
2731   int regno = REGNO (operand);
2732 
2733   if (cl == NO_REGS)
2734     return 0;
2735 
2736   return (regno < FIRST_PSEUDO_REGISTER
2737 	  && in_hard_reg_set_p (reg_class_contents[(int) cl],
2738 				mode, regno + offset));
2739 }
2740 
2741 /* Split a single instruction.  Helper function for split_all_insns and
2742    split_all_insns_noflow.  Return last insn in the sequence if successful,
2743    or NULL if unsuccessful.  */
2744 
2745 static rtx
2746 split_insn (rtx insn)
2747 {
2748   /* Split insns here to get max fine-grain parallelism.  */
2749   rtx first = PREV_INSN (insn);
2750   rtx last = try_split (PATTERN (insn), insn, 1);
2751   rtx insn_set, last_set, note;
2752 
2753   if (last == insn)
2754     return NULL_RTX;
2755 
2756   /* If the original instruction was a single set that was known to be
2757      equivalent to a constant, see if we can say the same about the last
2758      instruction in the split sequence.  The two instructions must set
2759      the same destination.  */
2760   insn_set = single_set (insn);
2761   if (insn_set)
2762     {
2763       last_set = single_set (last);
2764       if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2765 	{
2766 	  note = find_reg_equal_equiv_note (insn);
2767 	  if (note && CONSTANT_P (XEXP (note, 0)))
2768 	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2769 	  else if (CONSTANT_P (SET_SRC (insn_set)))
2770 	    set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2771 	}
2772     }
2773 
2774   /* try_split returns the NOTE that INSN became.  */
2775   SET_INSN_DELETED (insn);
2776 
2777   /* ??? Coddle to md files that generate subregs in post-reload
2778      splitters instead of computing the proper hard register.  */
2779   if (reload_completed && first != last)
2780     {
2781       first = NEXT_INSN (first);
2782       for (;;)
2783 	{
2784 	  if (INSN_P (first))
2785 	    cleanup_subreg_operands (first);
2786 	  if (first == last)
2787 	    break;
2788 	  first = NEXT_INSN (first);
2789 	}
2790     }
2791 
2792   return last;
2793 }
2794 
2795 /* Split all insns in the function.  */
2796 
2797 void
2798 split_all_insns (void)
2799 {
2800   sbitmap blocks;
2801   bool changed;
2802   basic_block bb;
2803 
2804   blocks = sbitmap_alloc (last_basic_block);
2805   sbitmap_zero (blocks);
2806   changed = false;
2807 
2808   FOR_EACH_BB_REVERSE (bb)
2809     {
2810       rtx insn, next;
2811       bool finish = false;
2812 
2813       rtl_profile_for_bb (bb);
2814       for (insn = BB_HEAD (bb); !finish ; insn = next)
2815 	{
2816 	  /* Can't use `next_real_insn', because that might skip over
2817 	     CODE_LABELs and hence over basic block boundaries.  */
2818 	  next = NEXT_INSN (insn);
2819 	  finish = (insn == BB_END (bb));
2820 	  if (INSN_P (insn))
2821 	    {
2822 	      rtx set = single_set (insn);
2823 
2824 	      /* Don't split no-op move insns.  These should silently
2825 		 disappear later in final.  Splitting such insns would
2826 		 break the code that handles LIBCALL blocks.  */
2827 	      if (set && set_noop_p (set))
2828 		{
2829 		  /* Nops get in the way while scheduling, so delete them
2830 		     now if register allocation has already been done.  It
2831 		     is too risky to try to do this before register
2832 		     allocation, and there are unlikely to be very many
2833 		     nops then anyway.  */
2834 		  if (reload_completed)
2835 		      delete_insn_and_edges (insn);
2836 		}
2837 	      else
2838 		{
2839 		  rtx last = split_insn (insn);
2840 		  if (last)
2841 		    {
2842 		      /* The split sequence may include a barrier, but the
2843 			 BB boundary we are interested in will be set to
2844 			 the previous insn.  */
2845 
2846 		      while (BARRIER_P (last))
2847 			last = PREV_INSN (last);
2848 		      SET_BIT (blocks, bb->index);
2849 		      changed = true;
2850 		    }
2851 		}
2852 	    }
2853 	}
2854     }
2855 
2856   default_rtl_profile ();
2857   if (changed)
2858     find_many_sub_basic_blocks (blocks);
2859 
2860 #ifdef ENABLE_CHECKING
2861   verify_flow_info ();
2862 #endif
2863 
2864   sbitmap_free (blocks);
2865 }
2866 
2867 /* Same as split_all_insns, but do not expect CFG to be available.
2868    Used by machine dependent reorg passes.  */
2869 
2870 unsigned int
2871 split_all_insns_noflow (void)
2872 {
2873   rtx next, insn;
2874 
2875   for (insn = get_insns (); insn; insn = next)
2876     {
2877       next = NEXT_INSN (insn);
2878       if (INSN_P (insn))
2879 	{
2880 	  /* Don't split no-op move insns.  These should silently
2881 	     disappear later in final.  Splitting such insns would
2882 	     break the code that handles LIBCALL blocks.  */
2883 	  rtx set = single_set (insn);
2884 	  if (set && set_noop_p (set))
2885 	    {
2886 	      /* Nops get in the way while scheduling, so delete them
2887 		 now if register allocation has already been done.  It
2888 		 is too risky to try to do this before register
2889 		 allocation, and there are unlikely to be very many
2890 		 nops then anyway.
2891 
2892 		 ??? Should we use delete_insn when the CFG isn't valid?  */
2893 	      if (reload_completed)
2894 		delete_insn_and_edges (insn);
2895 	    }
2896 	  else
2897 	    split_insn (insn);
2898 	}
2899     }
2900   return 0;
2901 }
2902 
2903 #ifdef HAVE_peephole2
2904 struct peep2_insn_data
2905 {
2906   rtx insn;
2907   regset live_before;
2908 };
2909 
2910 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2911 static int peep2_current;
2912 /* The number of instructions available to match a peep2.  */
2913 int peep2_current_count;
2914 
2915 /* A non-insn marker indicating the last insn of the block.
2916    The live_before regset for this element is correct, indicating
2917    DF_LIVE_OUT for the block.  */
2918 #define PEEP2_EOB	pc_rtx
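
/* peep2_insn_data is used as a ring buffer: peep2_current indexes the
   most recently recorded insn, and logical offset N maps to physical
   slot (peep2_current + N) % (MAX_INSNS_PER_PEEP2 + 1), which the
   functions below spell as an add followed by a conditional
   subtraction.  */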
2919 
2920 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2921    does not exist.  Used by the recognizer to find the next insn to match
2922    in a multi-insn pattern.  */
2923 
2924 rtx
2925 peep2_next_insn (int n)
2926 {
2927   gcc_assert (n <= peep2_current_count);
2928 
2929   n += peep2_current;
2930   if (n >= MAX_INSNS_PER_PEEP2 + 1)
2931     n -= MAX_INSNS_PER_PEEP2 + 1;
2932 
2933   return peep2_insn_data[n].insn;
2934 }
2935 
2936 /* Return true if REGNO is dead before the Nth non-note insn
2937    after `current'.  */
2938 
2939 int
2940 peep2_regno_dead_p (int ofs, int regno)
2941 {
2942   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2943 
2944   ofs += peep2_current;
2945   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2946     ofs -= MAX_INSNS_PER_PEEP2 + 1;
2947 
2948   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2949 
2950   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2951 }
2952 
2953 /* Similarly for a REG.  */
2954 
2955 int
2956 peep2_reg_dead_p (int ofs, rtx reg)
2957 {
2958   int regno, n;
2959 
2960   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2961 
2962   ofs += peep2_current;
2963   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2964     ofs -= MAX_INSNS_PER_PEEP2 + 1;
2965 
2966   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2967 
2968   regno = REGNO (reg);
2969   n = hard_regno_nregs[regno][GET_MODE (reg)];
2970   while (--n >= 0)
2971     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2972       return 0;
2973   return 1;
2974 }
2975 
2976 /* Try to find a hard register of mode MODE, matching the register class in
2977    CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2978    remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
2979    in which case the only condition is that the register must be available
2980    before CURRENT_INSN.
2981    Registers that already have bits set in REG_SET will not be considered.
2982 
2983    If an appropriate register is available, it will be returned and the
2984    corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2985    returned.  */
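
/* A hypothetical usage sketch, e.g. from a define_peephole2 body in a
   machine description that needs one scratch register:

       HARD_REG_SET used;
       CLEAR_HARD_REG_SET (used);
       rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
       if (scratch == NULL_RTX)
	 FAIL;

   The class string is parsed like an insn constraint, so "r" means
   GENERAL_REGS.  */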
2986 
2987 rtx
2988 peep2_find_free_register (int from, int to, const char *class_str,
2989 			  enum machine_mode mode, HARD_REG_SET *reg_set)
2990 {
2991   static int search_ofs;
2992   enum reg_class cl;
2993   HARD_REG_SET live;
2994   df_ref *def_rec;
2995   int i;
2996 
2997   gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2998   gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2999 
3000   from += peep2_current;
3001   if (from >= MAX_INSNS_PER_PEEP2 + 1)
3002     from -= MAX_INSNS_PER_PEEP2 + 1;
3003   to += peep2_current;
3004   if (to >= MAX_INSNS_PER_PEEP2 + 1)
3005     to -= MAX_INSNS_PER_PEEP2 + 1;
3006 
3007   gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3008   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3009 
3010   while (from != to)
3011     {
3012       gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3013 
3014       /* Don't use registers set or clobbered by the insn.  */
3015       for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3016 	   *def_rec; def_rec++)
3017 	SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3018 
3019       if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3020 	from = 0;
3021     }
3022 
3023   cl = (class_str[0] == 'r' ? GENERAL_REGS
3024 	   : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3025 
3026   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3027     {
3028       int raw_regno, regno, success, j;
3029 
3030       /* Distribute the free registers as much as possible.  */
3031       raw_regno = search_ofs + i;
3032       if (raw_regno >= FIRST_PSEUDO_REGISTER)
3033 	raw_regno -= FIRST_PSEUDO_REGISTER;
3034 #ifdef REG_ALLOC_ORDER
3035       regno = reg_alloc_order[raw_regno];
3036 #else
3037       regno = raw_regno;
3038 #endif
3039 
3040       /* Don't allocate fixed registers.  */
3041       if (fixed_regs[regno])
3042 	continue;
3043       /* Don't allocate global registers.  */
3044       if (global_regs[regno])
3045 	continue;
3046       /* Make sure the register is of the right class.  */
3047       if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3048 	continue;
3049       /* And can support the mode we need.  */
3050       if (! HARD_REGNO_MODE_OK (regno, mode))
3051 	continue;
3052       /* And that we don't create an extra save/restore.  */
3053       if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3054 	continue;
3055       if (! targetm.hard_regno_scratch_ok (regno))
3056 	continue;
3057 
3058       /* And we don't clobber traceback for noreturn functions.  */
3059       if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3060 	  && (! reload_completed || frame_pointer_needed))
3061 	continue;
3062 
3063       success = 1;
3064       for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3065 	{
3066 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3067 	      || TEST_HARD_REG_BIT (live, regno + j))
3068 	    {
3069 	      success = 0;
3070 	      break;
3071 	    }
3072 	}
3073       if (success)
3074 	{
3075 	  add_to_hard_reg_set (reg_set, mode, regno);
3076 
3077 	  /* Start the next search with the next register.  */
3078 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3079 	    raw_regno = 0;
3080 	  search_ofs = raw_regno;
3081 
3082 	  return gen_rtx_REG (mode, regno);
3083 	}
3084     }
3085 
3086   search_ofs = 0;
3087   return NULL_RTX;
3088 }
3089 
3090 /* Forget all currently tracked instructions; remember only the
3091    current LIVE regset.  */
3092 
3093 static void
3094 peep2_reinit_state (regset live)
3095 {
3096   int i;
3097 
3098   /* Indicate that all slots except the last hold invalid data.  */
3099   for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3100     peep2_insn_data[i].insn = NULL_RTX;
3101   peep2_current_count = 0;
3102 
3103   /* Indicate that the last slot contains live_after data.  */
3104   peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3105   peep2_current = MAX_INSNS_PER_PEEP2;
3106 
3107   COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3108 }
3109 
3110 /* Perform the peephole2 optimization pass.  */
3111 
3112 static void
3113 peephole2_optimize (void)
3114 {
3115   rtx insn, prev;
3116   bitmap live;
3117   int i;
3118   basic_block bb;
3119   bool do_cleanup_cfg = false;
3120   bool do_rebuild_jump_labels = false;
3121 
3122   df_set_flags (DF_LR_RUN_DCE);
3123   df_analyze ();
3124 
3125   /* Initialize the regsets we're going to use.  */
3126   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3127     peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3128   live = BITMAP_ALLOC (&reg_obstack);
3129 
3130   FOR_EACH_BB_REVERSE (bb)
3131     {
3132       rtl_profile_for_bb (bb);
3133 
3134       /* Start up propagation.  */
3135       bitmap_copy (live, DF_LR_OUT (bb));
3136       df_simulate_initialize_backwards (bb, live);
3137       peep2_reinit_state (live);
3138 
3139       for (insn = BB_END (bb); ; insn = prev)
3140 	{
3141 	  prev = PREV_INSN (insn);
3142 	  if (NONDEBUG_INSN_P (insn))
3143 	    {
3144 	      rtx attempt, before_try, x;
3145 	      int match_len;
3146 	      rtx note;
3147 	      bool was_call = false;
3148 
3149 	      /* Record this insn.  */
3150 	      if (--peep2_current < 0)
3151 		peep2_current = MAX_INSNS_PER_PEEP2;
3152 	      if (peep2_current_count < MAX_INSNS_PER_PEEP2
3153 		  && peep2_insn_data[peep2_current].insn == NULL_RTX)
3154 		peep2_current_count++;
3155 	      peep2_insn_data[peep2_current].insn = insn;
3156 	      df_simulate_one_insn_backwards (bb, insn, live);
3157 	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3158 
3159 	      if (RTX_FRAME_RELATED_P (insn))
3160 		{
3161 		  /* If an insn has RTX_FRAME_RELATED_P set, peephole
3162 		     substitution would lose the
3163 		     REG_FRAME_RELATED_EXPR that is attached.  */
3164 		  peep2_reinit_state (live);
3165 		  attempt = NULL;
3166 		}
3167 	      else
3168 		/* Match the peephole.  */
3169 		attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
3170 
3171 	      if (attempt != NULL)
3172 		{
3173 		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
3174 		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3175 		     cfg-related call notes.  */
3176 		  for (i = 0; i <= match_len; ++i)
3177 		    {
3178 		      int j;
3179 		      rtx old_insn, new_insn, note;
3180 
3181 		      j = i + peep2_current;
3182 		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
3183 			j -= MAX_INSNS_PER_PEEP2 + 1;
3184 		      old_insn = peep2_insn_data[j].insn;
3185 		      if (!CALL_P (old_insn))
3186 			continue;
3187 		      was_call = true;
3188 
3189 		      new_insn = attempt;
3190 		      while (new_insn != NULL_RTX)
3191 			{
3192 			  if (CALL_P (new_insn))
3193 			    break;
3194 			  new_insn = NEXT_INSN (new_insn);
3195 			}
3196 
3197 		      gcc_assert (new_insn != NULL_RTX);
3198 
3199 		      CALL_INSN_FUNCTION_USAGE (new_insn)
3200 			= CALL_INSN_FUNCTION_USAGE (old_insn);
3201 
3202 		      for (note = REG_NOTES (old_insn);
3203 			   note;
3204 			   note = XEXP (note, 1))
3205 			switch (REG_NOTE_KIND (note))
3206 			  {
3207 			  case REG_NORETURN:
3208 			  case REG_SETJMP:
3209 			    add_reg_note (new_insn, REG_NOTE_KIND (note),
3210 					  XEXP (note, 0));
3211 			    break;
3212 			  default:
3213 			    /* Discard all other reg notes.  */
3214 			    break;
3215 			  }
3216 
3217 		      /* Croak if there is another call in the sequence.  */
3218 		      while (++i <= match_len)
3219 			{
3220 			  j = i + peep2_current;
3221 			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
3222 			    j -= MAX_INSNS_PER_PEEP2 + 1;
3223 			  old_insn = peep2_insn_data[j].insn;
3224 			  gcc_assert (!CALL_P (old_insn));
3225 			}
3226 		      break;
3227 		    }
3228 
3229 		  i = match_len + peep2_current;
3230 		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
3231 		    i -= MAX_INSNS_PER_PEEP2 + 1;
3232 
3233 		  note = find_reg_note (peep2_insn_data[i].insn,
3234 					REG_EH_REGION, NULL_RTX);
3235 
3236 		  /* Replace the old sequence with the new.  */
3237 		  attempt = emit_insn_after_setloc (attempt,
3238 						    peep2_insn_data[i].insn,
3239 				       INSN_LOCATOR (peep2_insn_data[i].insn));
3240 		  before_try = PREV_INSN (insn);
3241 		  delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3242 
3243 		  /* Re-insert the EH_REGION notes.  */
3244 		  if (note || (was_call && nonlocal_goto_handler_labels))
3245 		    {
3246 		      edge eh_edge;
3247 		      edge_iterator ei;
3248 
3249 		      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3250 			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3251 			  break;
3252 
3253 		      if (note)
3254 			copy_reg_eh_region_note_backward (note, attempt,
3255 							  before_try);
3256 
3257 		      if (eh_edge)
3258 			for (x = attempt ; x != before_try ; x = PREV_INSN (x))
3259 			  if (x != BB_END (bb)
3260 			      && (can_throw_internal (x)
3261 				  || can_nonlocal_goto (x)))
3262 			    {
3263 			      edge nfte, nehe;
3264 			      int flags;
3265 
3266 			      nfte = split_block (bb, x);
3267 			      flags = (eh_edge->flags
3268 				       & (EDGE_EH | EDGE_ABNORMAL));
3269 			      if (CALL_P (x))
3270 				flags |= EDGE_ABNORMAL_CALL;
3271 			      nehe = make_edge (nfte->src, eh_edge->dest,
3272 						flags);
3273 
3274 			      nehe->probability = eh_edge->probability;
3275 			      nfte->probability
3276 				= REG_BR_PROB_BASE - nehe->probability;
3277 
3278 			      do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3279 			      bb = nfte->src;
3280 			      eh_edge = nehe;
3281 			    }
3282 
3283 		      /* A possibly trapping insn may have been turned into
3284 			 a non-trapping one; zap any dummy outgoing edges.  */
3285 		      do_cleanup_cfg |= purge_dead_edges (bb);
3286 		    }
3287 
3288 		  if (targetm.have_conditional_execution ())
3289 		    {
3290 		      for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3291 			peep2_insn_data[i].insn = NULL_RTX;
3292 		      peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3293 		      peep2_current_count = 0;
3294 		    }
3295 		  else
3296 		    {
3297 		      /* Back up lifetime information past the end of the
3298 			 newly created sequence.  */
3299 		      if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3300 			i = 0;
3301 		      bitmap_copy (live, peep2_insn_data[i].live_before);
3302 
3303 		      /* Update life information for the new sequence.  */
3304 		      x = attempt;
3305 		      do
3306 			{
3307 			  if (INSN_P (x))
3308 			    {
3309 			      if (--i < 0)
3310 				i = MAX_INSNS_PER_PEEP2;
3311 			      if (peep2_current_count < MAX_INSNS_PER_PEEP2
3312 				  && peep2_insn_data[i].insn == NULL_RTX)
3313 				peep2_current_count++;
3314 			      peep2_insn_data[i].insn = x;
3315 			      df_insn_rescan (x);
3316 			      df_simulate_one_insn_backwards (bb, x, live);
3317 			      bitmap_copy (peep2_insn_data[i].live_before,
3318 					   live);
3319 			    }
3320 			  x = PREV_INSN (x);
3321 			}
3322 		      while (x != prev);
3323 
3324 		      peep2_current = i;
3325 		    }
3326 
3327 		  /* If we generated a jump instruction, it won't have
3328 		     JUMP_LABEL set.  Recompute after we're done.  */
3329 		  for (x = attempt; x != before_try; x = PREV_INSN (x))
3330 		    if (JUMP_P (x))
3331 		      {
3332 		        do_rebuild_jump_labels = true;
3333 			break;
3334 		      }
3335 		}
3336 	    }
3337 
3338 	  if (insn == BB_HEAD (bb))
3339 	    break;
3340 	}
3341     }
3342 
3343   default_rtl_profile ();
3344   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3345     BITMAP_FREE (peep2_insn_data[i].live_before);
3346   BITMAP_FREE (live);
3347   if (do_rebuild_jump_labels)
3348     rebuild_jump_labels (get_insns ());
3349 }
3350 #endif /* HAVE_peephole2 */
3351 
3352 /* Common predicates for use with define_bypass.  */
3353 
3354 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3355    data, not on the address operand(s), of the store.  IN_INSN and
3356    OUT_INSN must each be either a single_set or a PARALLEL with SETs inside.  */
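
/* A sketch of the intended use, from a machine description (the insn
   reservation names here are hypothetical):

       (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   i.e. the shorter latency applies only when the ALU result feeds the
   store's data operand rather than its address.  */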
3357 
3358 int
3359 store_data_bypass_p (rtx out_insn, rtx in_insn)
3360 {
3361   rtx out_set, in_set;
3362   rtx out_pat, in_pat;
3363   rtx out_exp, in_exp;
3364   int i, j;
3365 
3366   in_set = single_set (in_insn);
3367   if (in_set)
3368     {
3369       if (!MEM_P (SET_DEST (in_set)))
3370 	return false;
3371 
3372       out_set = single_set (out_insn);
3373       if (out_set)
3374         {
3375           if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3376             return false;
3377         }
3378       else
3379         {
3380           out_pat = PATTERN (out_insn);
3381 
3382 	  if (GET_CODE (out_pat) != PARALLEL)
3383 	    return false;
3384 
3385           for (i = 0; i < XVECLEN (out_pat, 0); i++)
3386             {
3387               out_exp = XVECEXP (out_pat, 0, i);
3388 
3389               if (GET_CODE (out_exp) == CLOBBER)
3390                 continue;
3391 
3392               gcc_assert (GET_CODE (out_exp) == SET);
3393 
3394               if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3395                 return false;
3396             }
3397         }
3398     }
3399   else
3400     {
3401       in_pat = PATTERN (in_insn);
3402       gcc_assert (GET_CODE (in_pat) == PARALLEL);
3403 
3404       for (i = 0; i < XVECLEN (in_pat, 0); i++)
3405 	{
3406 	  in_exp = XVECEXP (in_pat, 0, i);
3407 
3408 	  if (GET_CODE (in_exp) == CLOBBER)
3409 	    continue;
3410 
3411 	  gcc_assert (GET_CODE (in_exp) == SET);
3412 
3413 	  if (!MEM_P (SET_DEST (in_exp)))
3414 	    return false;
3415 
3416           out_set = single_set (out_insn);
3417           if (out_set)
3418             {
3419               if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3420                 return false;
3421             }
3422           else
3423             {
3424               out_pat = PATTERN (out_insn);
3425               gcc_assert (GET_CODE (out_pat) == PARALLEL);
3426 
3427               for (j = 0; j < XVECLEN (out_pat, 0); j++)
3428                 {
3429                   out_exp = XVECEXP (out_pat, 0, j);
3430 
3431                   if (GET_CODE (out_exp) == CLOBBER)
3432                     continue;
3433 
3434                   gcc_assert (GET_CODE (out_exp) == SET);
3435 
3436                   if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3437                     return false;
3438                 }
3439             }
3440         }
3441     }
3442 
3443   return true;
3444 }
3445 
3446 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3447    condition, and not in the THEN or ELSE branch.  OUT_INSN may be a single
3448    or a multiple set; IN_INSN should be a single_set for this to be reliable,
3449    but for convenience of insn categorization it may be any JUMP or CALL insn.  */
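
/* For illustration: with OUT_INSN setting (reg 100) and IN_INSN being

       (set (pc) (if_then_else (eq (reg 100) (const_int 0))
			       (label_ref 23) (pc)))

   the dependency is in the condition, so this returns true; it would
   return false if (reg 100) instead appeared in one of the arms.  */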
3450 
3451 int
3452 if_test_bypass_p (rtx out_insn, rtx in_insn)
3453 {
3454   rtx out_set, in_set;
3455 
3456   in_set = single_set (in_insn);
3457   if (! in_set)
3458     {
3459       gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3460       return false;
3461     }
3462 
3463   if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3464     return false;
3465   in_set = SET_SRC (in_set);
3466 
3467   out_set = single_set (out_insn);
3468   if (out_set)
3469     {
3470       if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3471 	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3472 	return false;
3473     }
3474   else
3475     {
3476       rtx out_pat;
3477       int i;
3478 
3479       out_pat = PATTERN (out_insn);
3480       gcc_assert (GET_CODE (out_pat) == PARALLEL);
3481 
3482       for (i = 0; i < XVECLEN (out_pat, 0); i++)
3483 	{
3484 	  rtx exp = XVECEXP (out_pat, 0, i);
3485 
3486 	  if (GET_CODE (exp) == CLOBBER)
3487 	    continue;
3488 
3489 	  gcc_assert (GET_CODE (exp) == SET);
3490 
3491 	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3492 	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3493 	    return false;
3494 	}
3495     }
3496 
3497   return true;
3498 }
3499 
3500 static bool
3501 gate_handle_peephole2 (void)
3502 {
3503   return (optimize > 0 && flag_peephole2);
3504 }
3505 
3506 static unsigned int
3507 rest_of_handle_peephole2 (void)
3508 {
3509 #ifdef HAVE_peephole2
3510   peephole2_optimize ();
3511 #endif
3512   return 0;
3513 }
3514 
3515 struct rtl_opt_pass pass_peephole2 =
3516 {
3517  {
3518   RTL_PASS,
3519   "peephole2",                          /* name */
3520   gate_handle_peephole2,                /* gate */
3521   rest_of_handle_peephole2,             /* execute */
3522   NULL,                                 /* sub */
3523   NULL,                                 /* next */
3524   0,                                    /* static_pass_number */
3525   TV_PEEPHOLE2,                         /* tv_id */
3526   0,                                    /* properties_required */
3527   0,                                    /* properties_provided */
3528   0,                                    /* properties_destroyed */
3529   0,                                    /* todo_flags_start */
3530   TODO_df_finish | TODO_verify_rtl_sharing |
3531   TODO_dump_func                       /* todo_flags_finish */
3532  }
3533 };
3534 
3535 static unsigned int
3536 rest_of_handle_split_all_insns (void)
3537 {
3538   split_all_insns ();
3539   return 0;
3540 }
3541 
3542 struct rtl_opt_pass pass_split_all_insns =
3543 {
3544  {
3545   RTL_PASS,
3546   "split1",                             /* name */
3547   NULL,                                 /* gate */
3548   rest_of_handle_split_all_insns,       /* execute */
3549   NULL,                                 /* sub */
3550   NULL,                                 /* next */
3551   0,                                    /* static_pass_number */
3552   TV_NONE,                              /* tv_id */
3553   0,                                    /* properties_required */
3554   0,                                    /* properties_provided */
3555   0,                                    /* properties_destroyed */
3556   0,                                    /* todo_flags_start */
3557   TODO_dump_func                        /* todo_flags_finish */
3558  }
3559 };
3560 
3561 static unsigned int
3562 rest_of_handle_split_after_reload (void)
3563 {
3564   /* If optimizing, then go ahead and split insns now.  */
3565 #ifndef STACK_REGS
3566   if (optimize > 0)
3567 #endif
3568     split_all_insns ();
3569   return 0;
3570 }
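/* For clarity, after preprocessing the body above reads

     if (optimize > 0)
       split_all_insns ();

   on targets without STACK_REGS, and simply

     split_all_insns ();

   on STACK_REGS targets.  */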
3571 
3572 struct rtl_opt_pass pass_split_after_reload =
3573 {
3574  {
3575   RTL_PASS,
3576   "split2",                             /* name */
3577   NULL,                                 /* gate */
3578   rest_of_handle_split_after_reload,    /* execute */
3579   NULL,                                 /* sub */
3580   NULL,                                 /* next */
3581   0,                                    /* static_pass_number */
3582   TV_NONE,                              /* tv_id */
3583   0,                                    /* properties_required */
3584   0,                                    /* properties_provided */
3585   0,                                    /* properties_destroyed */
3586   0,                                    /* todo_flags_start */
3587   TODO_dump_func                        /* todo_flags_finish */
3588  }
3589 };
3590 
3591 static bool
3592 gate_handle_split_before_regstack (void)
3593 {
3594 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3595   /* If flow2 creates new instructions which need splitting, and
3596      scheduling after reload is not done, they might not be split
3597      until final, which does not allow splitting when
3598      HAVE_ATTR_length is defined.  */
3599 # ifdef INSN_SCHEDULING
3600   return (optimize && !flag_schedule_insns_after_reload);
3601 # else
3602   return (optimize);
3603 # endif
3604 #else
3605   return false;
3606 #endif
3607 }
3608 
3609 static unsigned int
3610 rest_of_handle_split_before_regstack (void)
3611 {
3612   split_all_insns ();
3613   return 0;
3614 }
3615 
3616 struct rtl_opt_pass pass_split_before_regstack =
3617 {
3618  {
3619   RTL_PASS,
3620   "split3",                             /* name */
3621   gate_handle_split_before_regstack,    /* gate */
3622   rest_of_handle_split_before_regstack, /* execute */
3623   NULL,                                 /* sub */
3624   NULL,                                 /* next */
3625   0,                                    /* static_pass_number */
3626   TV_NONE,                              /* tv_id */
3627   0,                                    /* properties_required */
3628   0,                                    /* properties_provided */
3629   0,                                    /* properties_destroyed */
3630   0,                                    /* todo_flags_start */
3631   TODO_dump_func                        /* todo_flags_finish */
3632  }
3633 };
3634 
3635 static bool
3636 gate_handle_split_before_sched2 (void)
3637 {
3638 #ifdef INSN_SCHEDULING
3639   return optimize > 0 && flag_schedule_insns_after_reload;
3640 #else
3641   return false;
3642 #endif
3643 }
3644 
3645 static unsigned int
3646 rest_of_handle_split_before_sched2 (void)
3647 {
3648 #ifdef INSN_SCHEDULING
3649   split_all_insns ();
3650 #endif
3651   return 0;
3652 }
3653 
3654 struct rtl_opt_pass pass_split_before_sched2 =
3655 {
3656  {
3657   RTL_PASS,
3658   "split4",                             /* name */
3659   gate_handle_split_before_sched2,      /* gate */
3660   rest_of_handle_split_before_sched2,   /* execute */
3661   NULL,                                 /* sub */
3662   NULL,                                 /* next */
3663   0,                                    /* static_pass_number */
3664   TV_NONE,                              /* tv_id */
3665   0,                                    /* properties_required */
3666   0,                                    /* properties_provided */
3667   0,                                    /* properties_destroyed */
3668   0,                                    /* todo_flags_start */
3669   TODO_verify_flow |
3670   TODO_dump_func                        /* todo_flags_finish */
3671  }
3672 };
3673 
3674 /* The placement of the splitting that we do for shorten_branches
3675    depends on whether the target uses reg-stack.  */
3676 static bool
3677 gate_do_final_split (void)
3678 {
3679 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3680   return true;
3681 #else
3682   return false;
3683 #endif
3684 }
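/* Taken together with gate_handle_split_before_regstack above, at most
   one of split3 and split5 runs on a HAVE_ATTR_length target: split3
   when STACK_REGS is defined (and its optimize/scheduling conditions
   hold), split5 otherwise.  */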
3685 
3686 struct rtl_opt_pass pass_split_for_shorten_branches =
3687 {
3688  {
3689   RTL_PASS,
3690   "split5",                             /* name */
3691   gate_do_final_split,                  /* gate */
3692   split_all_insns_noflow,               /* execute */
3693   NULL,                                 /* sub */
3694   NULL,                                 /* next */
3695   0,                                    /* static_pass_number */
3696   TV_NONE,                              /* tv_id */
3697   0,                                    /* properties_required */
3698   0,                                    /* properties_provided */
3699   0,                                    /* properties_destroyed */
3700   0,                                    /* todo_flags_start */
3701   TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
3702  }
3703 };
3704
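/* Overview of the five splitting passes above, as gated by their
   descriptors (dump-file names in parentheses):

     split1 (pass_split_all_insns): no gate; always splits.
     split2 (pass_split_after_reload): no gate; splits only when
            optimizing, except on STACK_REGS targets, where it always
            splits.
     split3 (pass_split_before_regstack): runs when STACK_REGS and
            HAVE_ATTR_length are defined, while optimizing, and (with
            INSN_SCHEDULING) only if scheduling after reload is
            disabled.
     split4 (pass_split_before_sched2): runs when INSN_SCHEDULING is
            defined, while optimizing, and scheduling after reload is
            enabled.
     split5 (pass_split_for_shorten_branches): runs when
            HAVE_ATTR_length is defined and STACK_REGS is not; uses
            split_all_insns_noflow.  */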