xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/recog.c (revision d909946ca08dceb44d7d0f22ec9488679695d976)
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
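
/* Usage sketch (illustrative, not part of GCC): a caller that has just
   queued grouped changes can use canonicalize_change_group to queue the
   operand swap needed if it left a commutative expression such as
   (plus (const_int 4) (reg)) in non-canonical order.  INSN and X here
   are hypothetical.  */
#if 0
static bool
example_recanonicalize (rtx insn, rtx x)
{
  canonicalize_change_group (insn, x);	/* queues the swap if needed */
  return apply_change_group ();		/* validate and commit, or cancel */
}
#endif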


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return the number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (object, true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (last_object);
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
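
/* Usage sketch (illustrative, not part of GCC): the life cycle of a
   change group built from the routines above.  Changes are queued with
   IN_GROUP nonzero and then committed or rolled back as a unit.  INSN
   and the two locations are hypothetical.  */
#if 0
static bool
example_change_group (rtx insn, rtx *loc_a, rtx new_a, rtx *loc_b, rtx new_b)
{
  int start = num_validated_changes ();

  validate_change (insn, loc_a, new_a, 1);	/* queued, not yet checked */
  validate_change (insn, loc_b, new_b, 1);	/* queued, not yet checked */

  if (verify_changes (start))
    {
      confirm_change_group ();	/* commit and let df rescan the insn */
      return true;
    }
  cancel_changes (start);	/* restore both locations */
  return false;
}
#endif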

/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
	         must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
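
/* Usage sketch (illustrative, not part of GCC): propagating a known
   constant into INSN with validate_replace_rtx.  All occurrences are
   replaced as one group, so INSN is left untouched unless the rewritten
   pattern still recognizes.  REG and VALUE are hypothetical.  */
#if 0
static bool
example_propagate_constant (rtx insn, rtx reg, HOST_WIDE_INT value)
{
  return validate_replace_rtx (reg, GEN_INT (value), insn) != 0;
}
#endif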

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
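
/* Usage sketch (illustrative, not part of GCC): these predicates are
   normally referenced from match_operand expressions in the machine
   description, but they can be called directly.  The pseudo register
   number below is hypothetical.  */
#if 0
static void
example_general_operand (void)
{
  rtx reg = gen_rtx_REG (SImode, 100);	/* an SImode pseudo */
  rtx cst = GEN_INT (42);

  gcc_assert (general_operand (reg, SImode));	/* register reference */
  gcc_assert (general_operand (cst, SImode));	/* constant fits SImode */
  gcc_assert (!general_operand (reg, DImode));	/* mode must match */
}
#endif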

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || in_hard_reg_set_p (operand_reg_set,
				    GET_MODE (op), REGNO (op))));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || in_hard_reg_set_p (operand_reg_set,
				    GET_MODE (op), REGNO (op))));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
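
/* Illustrative sketch (not part of GCC): the shape of rtl accepted by
   push_operand when no rounding is involved, assuming a target whose
   STACK_PUSH_CODE is PRE_DEC and whose PUSH_ROUNDING (if any) leaves a
   word-sized value unchanged.  */
#if 0
static void
example_push_operand (void)
{
  /* (mem:SI (pre_dec:P (reg sp))) -- a word pushed onto the stack.  */
  rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  rtx mem = gen_rtx_MEM (SImode, addr);

  gcc_assert (push_operand (mem, SImode));
}
#endif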

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before the end of the
       reload pass, because (SUBREG (MEM...)) forces reloading into a
       register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
	         then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
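
/* Usage sketch (illustrative, not part of GCC): the usual calling
   pattern for decode_asm_operands, mirroring check_asm_operands above;
   the arrays are sized with asm_noperands first.  BODY is a
   hypothetical asm insn body.  */
#if 0
static void
example_decode_asm (rtx body)
{
  int n = asm_noperands (body);
  rtx *operands;
  const char **constraints;
  const char *templ;

  if (n <= 0)
    return;

  operands = XALLOCAVEC (rtx, n);
  constraints = XALLOCAVEC (const char *, n);
  templ = decode_asm_operands (body, operands, NULL, constraints,
			       NULL, NULL);
  /* TEMPL is the assembler template; OPERANDS and CONSTRAINTS list
     the outputs first, then the inputs, then any labels.  */
}
#endif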
1608 
1609 /* Check if an asm_operand matches its constraints.
1610    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1611 
1612 int
1613 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1614 {
1615   int result = 0;
1616 #ifdef AUTO_INC_DEC
1617   bool incdec_ok = false;
1618 #endif
1619 
1620   /* Use constrain_operands after reload.  */
1621   gcc_assert (!reload_completed);
1622 
1623   /* Empty constraint string is the same as "X,...,X", i.e. X for as
1624      many alternatives as required to match the other operands.  */
1625   if (*constraint == '\0')
1626     result = 1;
1627 
1628   while (*constraint)
1629     {
1630       char c = *constraint;
1631       int len;
1632       switch (c)
1633 	{
1634 	case ',':
1635 	  constraint++;
1636 	  continue;
1637 	case '=':
1638 	case '+':
1639 	case '*':
1640 	case '%':
1641 	case '!':
1642 	case '#':
1643 	case '&':
1644 	case '?':
1645 	  break;
1646 
1647 	case '0': case '1': case '2': case '3': case '4':
1648 	case '5': case '6': case '7': case '8': case '9':
1649 	  /* If caller provided constraints pointer, look up
1650 	     the maching constraint.  Otherwise, our caller should have
1651 	     given us the proper matching constraint, but we can't
1652 	     actually fail the check if they didn't.  Indicate that
1653 	     results are inconclusive.  */
1654 	  if (constraints)
1655 	    {
1656 	      char *end;
1657 	      unsigned long match;
1658 
1659 	      match = strtoul (constraint, &end, 10);
1660 	      if (!result)
1661 		result = asm_operand_ok (op, constraints[match], NULL);
1662 	      constraint = (const char *) end;
1663 	    }
1664 	  else
1665 	    {
1666 	      do
1667 		constraint++;
1668 	      while (ISDIGIT (*constraint));
1669 	      if (! result)
1670 		result = -1;
1671 	    }
1672 	  continue;
1673 
1674 	case 'p':
1675 	  if (address_operand (op, VOIDmode))
1676 	    result = 1;
1677 	  break;
1678 
1679 	case TARGET_MEM_CONSTRAINT:
1680 	case 'V': /* non-offsettable */
1681 	  if (memory_operand (op, VOIDmode))
1682 	    result = 1;
1683 	  break;
1684 
1685 	case 'o': /* offsettable */
1686 	  if (offsettable_nonstrict_memref_p (op))
1687 	    result = 1;
1688 	  break;
1689 
1690 	case '<':
1691 	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1692 	     excepting those that expand_call created.  Further, on some
1693 	     machines which do not have generalized auto inc/dec, an inc/dec
1694 	     is not a memory_operand.
1695 
1696 	     Match any memory and hope things are resolved after reload.  */
1697 
1698 	  if (MEM_P (op)
1699 	      && (1
1700 		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
1701 		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
1702 	    result = 1;
1703 #ifdef AUTO_INC_DEC
1704 	  incdec_ok = true;
1705 #endif
1706 	  break;
1707 
1708 	case '>':
1709 	  if (MEM_P (op)
1710 	      && (1
1711 		  || GET_CODE (XEXP (op, 0)) == PRE_INC
1712 		  || GET_CODE (XEXP (op, 0)) == POST_INC))
1713 	    result = 1;
1714 #ifdef AUTO_INC_DEC
1715 	  incdec_ok = true;
1716 #endif
1717 	  break;
1718 
1719 	case 'E':
1720 	case 'F':
1721 	  if (CONST_DOUBLE_AS_FLOAT_P (op)
1722 	      || (GET_CODE (op) == CONST_VECTOR
1723 		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1724 	    result = 1;
1725 	  break;
1726 
1727 	case 'G':
1728 	  if (CONST_DOUBLE_AS_FLOAT_P (op)
1729 	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1730 	    result = 1;
1731 	  break;
1732 	case 'H':
1733 	  if (CONST_DOUBLE_AS_FLOAT_P (op)
1734 	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1735 	    result = 1;
1736 	  break;
1737 
1738 	case 's':
1739 	  if (CONST_SCALAR_INT_P (op))
1740 	    break;
1741 	  /* Fall through.  */
1742 
1743 	case 'i':
1744 	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1745 	    result = 1;
1746 	  break;
1747 
1748 	case 'n':
1749 	  if (CONST_SCALAR_INT_P (op))
1750 	    result = 1;
1751 	  break;
1752 
1753 	case 'I':
1754 	  if (CONST_INT_P (op)
1755 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1756 	    result = 1;
1757 	  break;
1758 	case 'J':
1759 	  if (CONST_INT_P (op)
1760 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1761 	    result = 1;
1762 	  break;
1763 	case 'K':
1764 	  if (CONST_INT_P (op)
1765 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1766 	    result = 1;
1767 	  break;
1768 	case 'L':
1769 	  if (CONST_INT_P (op)
1770 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1771 	    result = 1;
1772 	  break;
1773 	case 'M':
1774 	  if (CONST_INT_P (op)
1775 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1776 	    result = 1;
1777 	  break;
1778 	case 'N':
1779 	  if (CONST_INT_P (op)
1780 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1781 	    result = 1;
1782 	  break;
1783 	case 'O':
1784 	  if (CONST_INT_P (op)
1785 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1786 	    result = 1;
1787 	  break;
1788 	case 'P':
1789 	  if (CONST_INT_P (op)
1790 	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1791 	    result = 1;
1792 	  break;
1793 
1794 	case 'X':
1795 	  result = 1;
1796 	  break;
1797 
1798 	case 'g':
1799 	  if (general_operand (op, VOIDmode))
1800 	    result = 1;
1801 	  break;
1802 
1803 	default:
1804 	  /* For all other letters, we first check for a register class,
1805 	     otherwise it is an EXTRA_CONSTRAINT.  */
1806 	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1807 	    {
1808 	    case 'r':
1809 	      if (GET_MODE (op) == BLKmode)
1810 		break;
1811 	      if (register_operand (op, VOIDmode))
1812 		result = 1;
1813 	    }
1814 #ifdef EXTRA_CONSTRAINT_STR
1815 	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1816 	    /* Every memory operand can be reloaded to fit.  */
1817 	    result = result || memory_operand (op, VOIDmode);
1818 	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1819 	    /* Every address operand can be reloaded to fit.  */
1820 	    result = result || address_operand (op, VOIDmode);
1821 	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1822 	    result = 1;
1823 #endif
1824 	  break;
1825 	}
1826       len = CONSTRAINT_LEN (c, constraint);
1827       do
1828 	constraint++;
1829       while (--len && *constraint);
1830       if (len)
1831 	return 0;
1832     }
1833 
1834 #ifdef AUTO_INC_DEC
1835   /* For operands without < or > constraints, reject side-effects.  */
1836   if (!incdec_ok && result && MEM_P (op))
1837     switch (GET_CODE (XEXP (op, 0)))
1838       {
1839       case PRE_INC:
1840       case POST_INC:
1841       case PRE_DEC:
1842       case POST_DEC:
1843       case PRE_MODIFY:
1844       case POST_MODIFY:
1845 	return 0;
1846       default:
1847 	break;
1848       }
1849 #endif
1850 
1851   return result;
1852 }
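
/* Editor's note: an illustrative sketch, not part of the original file.
   It shows the intended use of asm_operand_ok on a made-up operand; a
   nonzero return means the operand can satisfy the constraint.  */
#if 0
static int
example_asm_operand_ok (void)
{
  rtx op = GEN_INT (42);
  /* "i" accepts any immediate, so the first call is nonzero; "m"
     requires a memory operand, so it is 0 for a CONST_INT.  */
  return asm_operand_ok (op, "i", NULL) && !asm_operand_ok (op, "m", NULL);
}
#endif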
1853 
1854 /* Given an rtx *P, if it is a sum containing an integer constant term,
1855    return the location (of type rtx *) of that constant term.
1856    Otherwise, return a null pointer.  */
1857 
1858 rtx *
1859 find_constant_term_loc (rtx *p)
1860 {
1861   rtx *tem;
1862   enum rtx_code code = GET_CODE (*p);
1863 
1864   /* If *P IS such a constant term, P is its location.  */
1865 
1866   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1867       || code == CONST)
1868     return p;
1869 
1870   /* Otherwise, if not a sum, it has no constant term.  */
1871 
1872   if (GET_CODE (*p) != PLUS)
1873     return 0;
1874 
1875 	  /* If both summands are constant, the whole sum is the constant term and P is its location.  */
1876 
1877   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1878       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1879     return p;
1880 
1881   /* Otherwise, check each summand for containing a constant term.  */
1882 
1883   if (XEXP (*p, 0) != 0)
1884     {
1885       tem = find_constant_term_loc (&XEXP (*p, 0));
1886       if (tem != 0)
1887 	return tem;
1888     }
1889 
1890   if (XEXP (*p, 1) != 0)
1891     {
1892       tem = find_constant_term_loc (&XEXP (*p, 1));
1893       if (tem != 0)
1894 	return tem;
1895     }
1896 
1897   return 0;
1898 }
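
/* Editor's note: an illustrative sketch, not part of the original file.
   The register number is hypothetical.  */
#if 0
static void
example_find_constant_term (void)
{
  /* ADDR = (plus:P (reg:P 1) (const_int 4)).  */
  rtx addr = gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, 1), GEN_INT (4));
  rtx *loc = find_constant_term_loc (&addr);
  /* LOC now points at the (const_int 4) inside ADDR, so a caller can
     rewrite the constant term in place, e.g. *loc = GEN_INT (8).  */
}
#endif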
1899 
1900 /* Return 1 if OP is a memory reference
1901    whose address contains no side effects
1902    and remains valid after the addition
1903    of a positive integer less than the
1904    size of the object being referenced.
1905 
1906    We assume that the original address is valid and do not check it.
1907 
1908    This uses strict_memory_address_p as a subroutine, so
1909    don't use it before reload.  */
1910 
1911 int
1912 offsettable_memref_p (rtx op)
1913 {
1914   return ((MEM_P (op))
1915 	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1916 					       MEM_ADDR_SPACE (op)));
1917 }
1918 
1919 /* Similar, but don't require a strictly valid mem ref:
1920    consider pseudo-regs valid as index or base regs.  */
1921 
1922 int
1923 offsettable_nonstrict_memref_p (rtx op)
1924 {
1925   return ((MEM_P (op))
1926 	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1927 					       MEM_ADDR_SPACE (op)));
1928 }
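
/* Editor's note: an illustrative sketch, not part of the original file.
   Before reload a MEM may still use a pseudo register as its base;
   only the nonstrict test can accept that, because the strict test
   requires hard registers.  The operand here is hypothetical.  */
#if 0
static void
example_offsettable (rtx mem)
{
  /* E.g. MEM = (mem:SI (plus:P (reg:P 100) (const_int 8)))
     where 100 is a pseudo.  */
  int ok_before_reload = offsettable_nonstrict_memref_p (mem);
  int ok_after_reload = offsettable_memref_p (mem);
  /* OK_BEFORE_RELOAD may be 1 while OK_AFTER_RELOAD is 0 until the
     pseudo has been replaced by a hard register.  */
}
#endif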
1929 
1930 /* Return 1 if Y is a memory address which contains no side effects
1931    and would remain valid for address space AS after the addition of
1932 	   a positive integer less than the size of MODE.
1933 
1934    We assume that the original address is valid and do not check it.
1935    We do check that it is valid for narrower modes.
1936 
1937    If STRICTP is nonzero, we require a strictly valid address,
1938    for the sake of use in reload.c.  */
1939 
1940 int
1941 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1942 				  addr_space_t as)
1943 {
1944   enum rtx_code ycode = GET_CODE (y);
1945   rtx z;
1946   rtx y1 = y;
1947   rtx *y2;
1948   int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1949     (strictp ? strict_memory_address_addr_space_p
1950 	     : memory_address_addr_space_p);
1951   unsigned int mode_sz = GET_MODE_SIZE (mode);
1952 
1953   if (CONSTANT_ADDRESS_P (y))
1954     return 1;
1955 
1956   /* Adjusting an offsettable address involves changing to a narrower mode.
1957      Make sure that's OK.  */
1958 
1959   if (mode_dependent_address_p (y, as))
1960     return 0;
1961 
1962   enum machine_mode address_mode = GET_MODE (y);
1963   if (address_mode == VOIDmode)
1964     address_mode = targetm.addr_space.address_mode (as);
1965 #ifdef POINTERS_EXTEND_UNSIGNED
1966   enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1967 #endif
1968 
1969   /* ??? How much offset does an offsettable BLKmode reference need?
1970      Clearly that depends on the situation in which it's being used.
1971      However, the current situation in which we test 0xffffffff is
1972      less than ideal.  Caveat user.  */
1973   if (mode_sz == 0)
1974     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1975 
1976   /* If the expression contains a constant term,
1977      see if it remains valid when max possible offset is added.  */
1978 
1979   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1980     {
1981       int good;
1982 
1983       y1 = *y2;
1984       *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1985       /* Use QImode because an odd displacement may be automatically invalid
1986 	 for any wider mode.  But it should be valid for a single byte.  */
1987       good = (*addressp) (QImode, y, as);
1988 
1989       /* In any case, restore old contents of memory.  */
1990       *y2 = y1;
1991       return good;
1992     }
1993 
1994   if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1995     return 0;
1996 
1997   /* The offset added here is chosen as the maximum offset that
1998      any instruction could need to add when operating on something
1999      of the specified mode.  We assume that if Y and Y+c are
2000      valid addresses then so is Y+d for all 0<d<c.  adjust_address will
2001      go inside a LO_SUM here, so we do so as well.  */
2002   if (GET_CODE (y) == LO_SUM
2003       && mode != BLKmode
2004       && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2005     z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2006 			plus_constant (address_mode, XEXP (y, 1),
2007 				       mode_sz - 1));
2008 #ifdef POINTERS_EXTEND_UNSIGNED
2009   /* Likewise for a ZERO_EXTEND from pointer_mode.  */
2010   else if (POINTERS_EXTEND_UNSIGNED > 0
2011 	   && GET_CODE (y) == ZERO_EXTEND
2012 	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
2013     z = gen_rtx_ZERO_EXTEND (address_mode,
2014 			     plus_constant (pointer_mode, XEXP (y, 0),
2015 					    mode_sz - 1));
2016 #endif
2017   else
2018     z = plus_constant (address_mode, y, mode_sz - 1);
2019 
2020   /* Use QImode because an odd displacement may be automatically invalid
2021      for any wider mode.  But it should be valid for a single byte.  */
2022   return (*addressp) (QImode, z, as);
2023 }
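
/* Editor's note: an illustrative walk-through, not part of the file.
   For MODE = SImode (size 4) and Y = (plus (reg) (const_int 8)), the
   constant-term branch above temporarily rewrites Y into
   (plus (reg) (const_int 11)), i.e. adds mode_sz - 1 = 3, and asks
   whether that address is still valid in QImode.  The non-PLUS case
   builds the probe address explicitly, roughly as sketched here.  */
#if 0
static int
example_offsettable_probe (rtx y)
{
  rtx probe = plus_constant (Pmode, y, GET_MODE_SIZE (SImode) - 1);
  return memory_address_p (QImode, probe);
}
#endif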
2024 
2025 /* Return 1 if ADDR is an address-expression whose effect depends
2026    on the mode of the memory reference it is used in.
2027 
2028    ADDRSPACE is the address space associated with the address.
2029 
2030    Autoincrement addressing is a typical example of mode-dependence
2031    because the amount of the increment depends on the mode.  */
2032 
2033 bool
2034 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2035 {
2036   /* Auto-increment addressing with anything other than post_modify
2037      or pre_modify always introduces a mode dependency.  Catch such
2038      cases now instead of deferring to the target.  */
2039   if (GET_CODE (addr) == PRE_INC
2040       || GET_CODE (addr) == POST_INC
2041       || GET_CODE (addr) == PRE_DEC
2042       || GET_CODE (addr) == POST_DEC)
2043     return true;
2044 
2045   return targetm.mode_dependent_address_p (addr, addrspace);
2046 }
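
/* Editor's note: an illustrative sketch, not part of the original file.
   A post-increment address is mode-dependent because the increment
   amount equals the access size; it is caught by the explicit check
   above before the target hook is consulted.  The register number is
   hypothetical.  */
#if 0
static bool
example_mode_dependent (void)
{
  rtx inc = gen_rtx_POST_INC (Pmode, gen_rtx_REG (Pmode, 1));
  return mode_dependent_address_p (inc, ADDR_SPACE_GENERIC);  /* true */
}
#endif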
2047 
2048 	/* Like extract_insn, but save the extracted insn and don't extract it
2049 	   again when called again for the same insn, expecting that recog_data
2050 	   still contains valid information.  This is used primarily by the
2051 	   gen_attr infrastructure, which often extracts the same insn repeatedly.  */
2052 void
2053 extract_insn_cached (rtx insn)
2054 {
2055   if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2056     return;
2057   extract_insn (insn);
2058   recog_data.insn = insn;
2059 }
2060 
2061 /* Do cached extract_insn, constrain_operands and complain about failures.
2062    Used by insn_attrtab.  */
2063 void
2064 extract_constrain_insn_cached (rtx insn)
2065 {
2066   extract_insn_cached (insn);
2067   if (which_alternative == -1
2068       && !constrain_operands (reload_completed))
2069     fatal_insn_not_found (insn);
2070 }
2071 
2072 /* Do cached constrain_operands and complain about failures.  */
2073 int
2074 constrain_operands_cached (int strict)
2075 {
2076   if (which_alternative == -1)
2077     return constrain_operands (strict);
2078   else
2079     return 1;
2080 }
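
/* Editor's note: an illustrative sketch, not part of the original file.
   The usual calling pattern from attribute code: extract once, then
   reuse the cached recog_data for repeated queries on the same insn.  */
#if 0
static void
example_cached_extraction (rtx insn)
{
  extract_constrain_insn_cached (insn);  /* fills recog_data, picks alt */
  extract_insn_cached (insn);            /* returns immediately, cached */
  if (which_alternative >= 0)
    {
      /* recog_data.operand[], recog_data.constraints[], etc. describe
	 INSN under the matched alternative.  */
    }
}
#endif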
2081 
2082 /* Analyze INSN and fill in recog_data.  */
2083 
2084 void
2085 extract_insn (rtx insn)
2086 {
2087   int i;
2088   int icode;
2089   int noperands;
2090   rtx body = PATTERN (insn);
2091 
2092   recog_data.n_operands = 0;
2093   recog_data.n_alternatives = 0;
2094   recog_data.n_dups = 0;
2095   recog_data.is_asm = false;
2096 
2097   switch (GET_CODE (body))
2098     {
2099     case USE:
2100     case CLOBBER:
2101     case ASM_INPUT:
2102     case ADDR_VEC:
2103     case ADDR_DIFF_VEC:
2104     case VAR_LOCATION:
2105       return;
2106 
2107     case SET:
2108       if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2109 	goto asm_insn;
2110       else
2111 	goto normal_insn;
2112     case PARALLEL:
2113       if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2114 	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2115 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2116 	goto asm_insn;
2117       else
2118 	goto normal_insn;
2119     case ASM_OPERANDS:
2120     asm_insn:
2121       recog_data.n_operands = noperands = asm_noperands (body);
2122       if (noperands >= 0)
2123 	{
2124 	  /* This insn is an `asm' with operands.  */
2125 
2126 	  /* expand_asm_operands makes sure there aren't too many operands.  */
2127 	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2128 
2129 	  /* Now get the operand values and constraints out of the insn.  */
2130 	  decode_asm_operands (body, recog_data.operand,
2131 			       recog_data.operand_loc,
2132 			       recog_data.constraints,
2133 			       recog_data.operand_mode, NULL);
2134 	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2135 	  if (noperands > 0)
2136 	    {
2137 	      const char *p = recog_data.constraints[0];
2138 	      recog_data.n_alternatives = 1;
2139 	      while (*p)
2140 		recog_data.n_alternatives += (*p++ == ',');
2141 	    }
2142 	  recog_data.is_asm = true;
2143 	  break;
2144 	}
2145       fatal_insn_not_found (insn);
2146 
2147     default:
2148     normal_insn:
2149       /* Ordinary insn: recognize it, get the operands via insn_extract
2150 	 and get the constraints.  */
2151 
2152       icode = recog_memoized (insn);
2153       if (icode < 0)
2154 	fatal_insn_not_found (insn);
2155 
2156       recog_data.n_operands = noperands = insn_data[icode].n_operands;
2157       recog_data.n_alternatives = insn_data[icode].n_alternatives;
2158       recog_data.n_dups = insn_data[icode].n_dups;
2159 
2160       insn_extract (insn);
2161 
2162       for (i = 0; i < noperands; i++)
2163 	{
2164 	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2165 	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2166 	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2167 	  /* VOIDmode match_operands get their mode from the real operand.  */
2168 	  if (recog_data.operand_mode[i] == VOIDmode)
2169 	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2170 	}
2171     }
2172   for (i = 0; i < noperands; i++)
2173     recog_data.operand_type[i]
2174       = (recog_data.constraints[i][0] == '=' ? OP_OUT
2175 	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2176 	 : OP_IN);
2177 
2178   gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2179 
2180   if (INSN_CODE (insn) < 0)
2181     for (i = 0; i < recog_data.n_alternatives; i++)
2182       recog_data.alternative_enabled_p[i] = true;
2183   else
2184     {
2185       recog_data.insn = insn;
2186       for (i = 0; i < recog_data.n_alternatives; i++)
2187 	{
2188 	  which_alternative = i;
2189 	  recog_data.alternative_enabled_p[i]
2190 	    = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2191 	}
2192     }
2193 
2194   recog_data.insn = NULL;
2195   which_alternative = -1;
2196 }
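
/* Editor's note: an illustrative sketch, not part of the original file.
   After extract_insn, the global recog_data describes the insn.  */
#if 0
static void
example_walk_operands (rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] == OP_OUT)
      {
	/* recog_data.operand[i] is written by INSN, and
	   recog_data.operand_loc[i] is its location within
	   PATTERN (insn), usable for in-place replacement.  */
      }
}
#endif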
2197 
2198 /* After calling extract_insn, you can use this function to extract some
2199    information from the constraint strings into a more usable form.
2200    The collected data is stored in recog_op_alt.  */
2201 void
2202 preprocess_constraints (void)
2203 {
2204   int i;
2205 
2206   for (i = 0; i < recog_data.n_operands; i++)
2207     memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2208 				 * sizeof (struct operand_alternative)));
2209 
2210   for (i = 0; i < recog_data.n_operands; i++)
2211     {
2212       int j;
2213       struct operand_alternative *op_alt;
2214       const char *p = recog_data.constraints[i];
2215 
2216       op_alt = recog_op_alt[i];
2217 
2218       for (j = 0; j < recog_data.n_alternatives; j++)
2219 	{
2220 	  op_alt[j].cl = NO_REGS;
2221 	  op_alt[j].constraint = p;
2222 	  op_alt[j].matches = -1;
2223 	  op_alt[j].matched = -1;
2224 
2225 	  if (!recog_data.alternative_enabled_p[j])
2226 	    {
2227 	      p = skip_alternative (p);
2228 	      continue;
2229 	    }
2230 
2231 	  if (*p == '\0' || *p == ',')
2232 	    {
2233 	      op_alt[j].anything_ok = 1;
2234 	      continue;
2235 	    }
2236 
2237 	  for (;;)
2238 	    {
2239 	      char c = *p;
2240 	      if (c == '#')
2241 		do
2242 		  c = *++p;
2243 		while (c != ',' && c != '\0');
2244 	      if (c == ',' || c == '\0')
2245 		{
2246 		  p++;
2247 		  break;
2248 		}
2249 
2250 	      switch (c)
2251 		{
2252 		case '=': case '+': case '*': case '%':
2253 		case 'E': case 'F': case 'G': case 'H':
2254 		case 's': case 'i': case 'n':
2255 		case 'I': case 'J': case 'K': case 'L':
2256 		case 'M': case 'N': case 'O': case 'P':
2257 		  /* These don't say anything we care about.  */
2258 		  break;
2259 
2260 		case '?':
2261 		  op_alt[j].reject += 6;
2262 		  break;
2263 		case '!':
2264 		  op_alt[j].reject += 600;
2265 		  break;
2266 		case '&':
2267 		  op_alt[j].earlyclobber = 1;
2268 		  break;
2269 
2270 		case '0': case '1': case '2': case '3': case '4':
2271 		case '5': case '6': case '7': case '8': case '9':
2272 		  {
2273 		    char *end;
2274 		    op_alt[j].matches = strtoul (p, &end, 10);
2275 		    recog_op_alt[op_alt[j].matches][j].matched = i;
2276 		    p = end;
2277 		  }
2278 		  continue;
2279 
2280 		case TARGET_MEM_CONSTRAINT:
2281 		  op_alt[j].memory_ok = 1;
2282 		  break;
2283 		case '<':
2284 		  op_alt[j].decmem_ok = 1;
2285 		  break;
2286 		case '>':
2287 		  op_alt[j].incmem_ok = 1;
2288 		  break;
2289 		case 'V':
2290 		  op_alt[j].nonoffmem_ok = 1;
2291 		  break;
2292 		case 'o':
2293 		  op_alt[j].offmem_ok = 1;
2294 		  break;
2295 		case 'X':
2296 		  op_alt[j].anything_ok = 1;
2297 		  break;
2298 
2299 		case 'p':
2300 		  op_alt[j].is_address = 1;
2301 		  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2302 		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2303 					     ADDRESS, SCRATCH)];
2304 		  break;
2305 
2306 		case 'g':
2307 		case 'r':
2308 		  op_alt[j].cl =
2309 		   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2310 		  break;
2311 
2312 		default:
2313 		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
2314 		    {
2315 		      op_alt[j].memory_ok = 1;
2316 		      break;
2317 		    }
2318 		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2319 		    {
2320 		      op_alt[j].is_address = 1;
2321 		      op_alt[j].cl
2322 			= (reg_class_subunion
2323 			   [(int) op_alt[j].cl]
2324 			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2325 						  ADDRESS, SCRATCH)]);
2326 		      break;
2327 		    }
2328 
2329 		  op_alt[j].cl
2330 		    = (reg_class_subunion
2331 		       [(int) op_alt[j].cl]
2332 		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2333 		  break;
2334 		}
2335 	      p += CONSTRAINT_LEN (c, p);
2336 	    }
2337 	}
2338     }
2339 }
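
/* Editor's note: an illustrative sketch, not part of the original file.
   After preprocess_constraints, recog_op_alt answers per-alternative
   questions without reparsing the constraint strings.  */
#if 0
static void
example_query_op_alt (rtx insn)
{
  int op, alt;

  extract_insn (insn);
  preprocess_constraints ();
  for (op = 0; op < recog_data.n_operands; op++)
    for (alt = 0; alt < recog_data.n_alternatives; alt++)
      {
	struct operand_alternative *oa = &recog_op_alt[op][alt];
	if (oa->memory_ok || oa->cl != NO_REGS)
	  {
	    /* In alternative ALT, operand OP may be a memory reference
	       or a register in class OA->cl.  */
	  }
      }
}
#endif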
2340 
2341 /* Check the operands of an insn against the insn's operand constraints
2342    and return 1 if they are valid.
2343    The information about the insn's operands, constraints, operand modes
2344    etc. is obtained from the global variables set up by extract_insn.
2345 
2346    WHICH_ALTERNATIVE is set to a number which indicates which
2347    alternative of constraints was matched: 0 for the first alternative,
2348    1 for the next, etc.
2349 
2350    In addition, when two operands are required to match
2351    and it happens that the output operand is (reg) while the
2352    input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2353    make the output operand look like the input.
2354    This is because the output operand is the one the template will print.
2355 
2356    This is used in final, just before printing the assembler code and by
2357    the routines that determine an insn's attribute.
2358 
2359 	   If STRICT is positive, it means that we have been
2360    called after reload has been completed.  In that case, we must
2361    do all checks strictly.  If it is zero, it means that we have been called
2362    before reload has completed.  In that case, we first try to see if we can
2363    find an alternative that matches strictly.  If not, we try again, this
2364    time assuming that reload will fix up the insn.  This provides a "best
2365    guess" for the alternative and is used to compute attributes of insns prior
2366    to reload.  A negative value of STRICT is used for this internal call.  */
2367 
2368 struct funny_match
2369 {
2370   int this_op, other;
2371 };
2372 
2373 int
2374 constrain_operands (int strict)
2375 {
2376   const char *constraints[MAX_RECOG_OPERANDS];
2377   int matching_operands[MAX_RECOG_OPERANDS];
2378   int earlyclobber[MAX_RECOG_OPERANDS];
2379   int c;
2380 
2381   struct funny_match funny_match[MAX_RECOG_OPERANDS];
2382   int funny_match_index;
2383 
2384   which_alternative = 0;
2385   if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2386     return 1;
2387 
2388   for (c = 0; c < recog_data.n_operands; c++)
2389     {
2390       constraints[c] = recog_data.constraints[c];
2391       matching_operands[c] = -1;
2392     }
2393 
2394   do
2395     {
2396       int seen_earlyclobber_at = -1;
2397       int opno;
2398       int lose = 0;
2399       funny_match_index = 0;
2400 
2401       if (!recog_data.alternative_enabled_p[which_alternative])
2402 	{
2403 	  int i;
2404 
2405 	  for (i = 0; i < recog_data.n_operands; i++)
2406 	    constraints[i] = skip_alternative (constraints[i]);
2407 
2408 	  which_alternative++;
2409 	  continue;
2410 	}
2411 
2412       for (opno = 0; opno < recog_data.n_operands; opno++)
2413 	{
2414 	  rtx op = recog_data.operand[opno];
2415 	  enum machine_mode mode = GET_MODE (op);
2416 	  const char *p = constraints[opno];
2417 	  int offset = 0;
2418 	  int win = 0;
2419 	  int val;
2420 	  int len;
2421 
2422 	  earlyclobber[opno] = 0;
2423 
2424 	  /* A unary operator may be accepted by the predicate, but it
2425 	     is irrelevant for matching constraints.  */
2426 	  if (UNARY_P (op))
2427 	    op = XEXP (op, 0);
2428 
2429 	  if (GET_CODE (op) == SUBREG)
2430 	    {
2431 	      if (REG_P (SUBREG_REG (op))
2432 		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2433 		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2434 					      GET_MODE (SUBREG_REG (op)),
2435 					      SUBREG_BYTE (op),
2436 					      GET_MODE (op));
2437 	      op = SUBREG_REG (op);
2438 	    }
2439 
2440 	  /* An empty constraint or empty alternative
2441 	     allows anything which matched the pattern.  */
2442 	  if (*p == 0 || *p == ',')
2443 	    win = 1;
2444 
2445 	  do
2446 	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2447 	      {
2448 	      case '\0':
2449 		len = 0;
2450 		break;
2451 	      case ',':
2452 		c = '\0';
2453 		break;
2454 
2455 	      case '?':  case '!': case '*':  case '%':
2456 	      case '=':  case '+':
2457 		break;
2458 
2459 	      case '#':
2460 		/* Ignore rest of this alternative as far as
2461 		   constraint checking is concerned.  */
2462 		do
2463 		  p++;
2464 		while (*p && *p != ',');
2465 		len = 0;
2466 		break;
2467 
2468 	      case '&':
2469 		earlyclobber[opno] = 1;
2470 		if (seen_earlyclobber_at < 0)
2471 		  seen_earlyclobber_at = opno;
2472 		break;
2473 
2474 	      case '0':  case '1':  case '2':  case '3':  case '4':
2475 	      case '5':  case '6':  case '7':  case '8':  case '9':
2476 		{
2477 		  /* This operand must be the same as a previous one.
2478 		     This kind of constraint is used for instructions such
2479 		     as add when they take only two operands.
2480 
2481 		     Note that the lower-numbered operand is passed first.
2482 
2483 		     If we are not testing strictly, assume that this
2484 		     constraint will be satisfied.  */
2485 
2486 		  char *end;
2487 		  int match;
2488 
2489 		  match = strtoul (p, &end, 10);
2490 		  p = end;
2491 
2492 		  if (strict < 0)
2493 		    val = 1;
2494 		  else
2495 		    {
2496 		      rtx op1 = recog_data.operand[match];
2497 		      rtx op2 = recog_data.operand[opno];
2498 
2499 		      /* A unary operator may be accepted by the predicate,
2500 			 but it is irrelevant for matching constraints.  */
2501 		      if (UNARY_P (op1))
2502 			op1 = XEXP (op1, 0);
2503 		      if (UNARY_P (op2))
2504 			op2 = XEXP (op2, 0);
2505 
2506 		      val = operands_match_p (op1, op2);
2507 		    }
2508 
2509 		  matching_operands[opno] = match;
2510 		  matching_operands[match] = opno;
2511 
2512 		  if (val != 0)
2513 		    win = 1;
2514 
2515 		  /* If output is *x and input is *--x, arrange later
2516 		     to change the output to *--x as well, since the
2517 		     output op is the one that will be printed.  */
2518 		  if (val == 2 && strict > 0)
2519 		    {
2520 		      funny_match[funny_match_index].this_op = opno;
2521 		      funny_match[funny_match_index++].other = match;
2522 		    }
2523 		}
2524 		len = 0;
2525 		break;
2526 
2527 	      case 'p':
2528 		/* p is used for address_operands.  When we are called by
2529 		   gen_reload, no one will have checked that the address is
2530 		   strictly valid, i.e., that all pseudos requiring hard regs
2531 		   have gotten them.  */
2532 		if (strict <= 0
2533 		    || (strict_memory_address_p (recog_data.operand_mode[opno],
2534 						 op)))
2535 		  win = 1;
2536 		break;
2537 
2538 		/* No need to check general_operand again;
2539 		   it was done in insn-recog.c.  Well, except that reload
2540 		   doesn't check the validity of its replacements, but
2541 		   that should only matter when there's a bug.  */
2542 	      case 'g':
2543 		/* Anything goes unless it is a REG and really has a hard reg
2544 		   but the hard reg is not in the class GENERAL_REGS.  */
2545 		if (REG_P (op))
2546 		  {
2547 		    if (strict < 0
2548 			|| GENERAL_REGS == ALL_REGS
2549 			|| (reload_in_progress
2550 			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2551 			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2552 		      win = 1;
2553 		  }
2554 		else if (strict < 0 || general_operand (op, mode))
2555 		  win = 1;
2556 		break;
2557 
2558 	      case 'X':
2559 		/* This is used for a MATCH_SCRATCH in the cases when
2560 		   we don't actually need anything.  So anything goes
2561 		   any time.  */
2562 		win = 1;
2563 		break;
2564 
2565 	      case TARGET_MEM_CONSTRAINT:
2566 		/* Memory operands must be valid, to the extent
2567 		   required by STRICT.  */
2568 		if (MEM_P (op))
2569 		  {
2570 		    if (strict > 0
2571 			&& !strict_memory_address_addr_space_p
2572 			     (GET_MODE (op), XEXP (op, 0),
2573 			      MEM_ADDR_SPACE (op)))
2574 		      break;
2575 		    if (strict == 0
2576 			&& !memory_address_addr_space_p
2577 			     (GET_MODE (op), XEXP (op, 0),
2578 			      MEM_ADDR_SPACE (op)))
2579 		      break;
2580 		    win = 1;
2581 		  }
2582 		/* Before reload, accept what reload can turn into mem.  */
2583 		else if (strict < 0 && CONSTANT_P (op))
2584 		  win = 1;
2585 		/* During reload, accept a pseudo.  */
2586 		else if (reload_in_progress && REG_P (op)
2587 			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2588 		  win = 1;
2589 		break;
2590 
2591 	      case '<':
2592 		if (MEM_P (op)
2593 		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2594 			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
2595 		  win = 1;
2596 		break;
2597 
2598 	      case '>':
2599 		if (MEM_P (op)
2600 		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
2601 			|| GET_CODE (XEXP (op, 0)) == POST_INC))
2602 		  win = 1;
2603 		break;
2604 
2605 	      case 'E':
2606 	      case 'F':
2607 		if (CONST_DOUBLE_AS_FLOAT_P (op)
2608 		    || (GET_CODE (op) == CONST_VECTOR
2609 			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2610 		  win = 1;
2611 		break;
2612 
2613 	      case 'G':
2614 	      case 'H':
2615 		if (CONST_DOUBLE_AS_FLOAT_P (op)
2616 		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2617 		  win = 1;
2618 		break;
2619 
2620 	      case 's':
2621 		if (CONST_SCALAR_INT_P (op))
2622 		  break;
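		/* Fall through.  */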
2623 	      case 'i':
2624 		if (CONSTANT_P (op))
2625 		  win = 1;
2626 		break;
2627 
2628 	      case 'n':
2629 		if (CONST_SCALAR_INT_P (op))
2630 		  win = 1;
2631 		break;
2632 
2633 	      case 'I':
2634 	      case 'J':
2635 	      case 'K':
2636 	      case 'L':
2637 	      case 'M':
2638 	      case 'N':
2639 	      case 'O':
2640 	      case 'P':
2641 		if (CONST_INT_P (op)
2642 		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2643 		  win = 1;
2644 		break;
2645 
2646 	      case 'V':
2647 		if (MEM_P (op)
2648 		    && ((strict > 0 && ! offsettable_memref_p (op))
2649 			|| (strict < 0
2650 			    && !(CONSTANT_P (op) || MEM_P (op)))
2651 			|| (reload_in_progress
2652 			    && !(REG_P (op)
2653 				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2654 		  win = 1;
2655 		break;
2656 
2657 	      case 'o':
2658 		if ((strict > 0 && offsettable_memref_p (op))
2659 		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
2660 		    /* Before reload, accept what reload can handle.  */
2661 		    || (strict < 0
2662 			&& (CONSTANT_P (op) || MEM_P (op)))
2663 		    /* During reload, accept a pseudo.  */
2664 		    || (reload_in_progress && REG_P (op)
2665 			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
2666 		  win = 1;
2667 		break;
2668 
2669 	      default:
2670 		{
2671 		  enum reg_class cl;
2672 
2673 		  cl = (c == 'r'
2674 			   ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2675 		  if (cl != NO_REGS)
2676 		    {
2677 		      if (strict < 0
2678 			  || (strict == 0
2679 			      && REG_P (op)
2680 			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2681 			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2682 			  || (REG_P (op)
2683 			      && reg_fits_class_p (op, cl, offset, mode)))
2684 		        win = 1;
2685 		    }
2686 #ifdef EXTRA_CONSTRAINT_STR
2687 		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
2688 		    win = 1;
2689 
2690 		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2691 			   /* Every memory operand can be reloaded to fit.  */
2692 			   && ((strict < 0 && MEM_P (op))
2693 			       /* Before reload, accept what reload can turn
2694 				  into mem.  */
2695 			       || (strict < 0 && CONSTANT_P (op))
2696 			       /* During reload, accept a pseudo.  */
2697 			       || (reload_in_progress && REG_P (op)
2698 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2699 		    win = 1;
2700 		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2701 			   /* Every address operand can be reloaded to fit.  */
2702 			   && strict < 0)
2703 		    win = 1;
2704 		  /* Cater to architectures like IA-64 that define extra memory
2705 		     constraints without using define_memory_constraint.  */
2706 		  else if (reload_in_progress
2707 			   && REG_P (op)
2708 			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
2709 			   && reg_renumber[REGNO (op)] < 0
2710 			   && reg_equiv_mem (REGNO (op)) != 0
2711 			   && EXTRA_CONSTRAINT_STR
2712 			      (reg_equiv_mem (REGNO (op)), c, p))
2713 		    win = 1;
2714 #endif
2715 		  break;
2716 		}
2717 	      }
2718 	  while (p += len, c);
2719 
2720 	  constraints[opno] = p;
2721 	  /* If this operand did not win somehow,
2722 	     this alternative loses.  */
2723 	  if (! win)
2724 	    lose = 1;
2725 	}
2726       /* This alternative won; the operands are ok.
2727 	 Change whichever operands this alternative says to change.  */
2728       if (! lose)
2729 	{
2730 	  int opno, eopno;
2731 
2732 	  /* See if any earlyclobber operand conflicts with some other
2733 	     operand.  */
2734 
2735 	  if (strict > 0  && seen_earlyclobber_at >= 0)
2736 	    for (eopno = seen_earlyclobber_at;
2737 		 eopno < recog_data.n_operands;
2738 		 eopno++)
2739 	      /* Ignore earlyclobber operands now in memory,
2740 		 because we would often report failure when we have
2741 		 two memory operands, one of which was formerly a REG.  */
2742 	      if (earlyclobber[eopno]
2743 		  && REG_P (recog_data.operand[eopno]))
2744 		for (opno = 0; opno < recog_data.n_operands; opno++)
2745 		  if ((MEM_P (recog_data.operand[opno])
2746 		       || recog_data.operand_type[opno] != OP_OUT)
2747 		      && opno != eopno
2748 		      /* Ignore things like match_operator operands.  */
2749 		      && *recog_data.constraints[opno] != 0
2750 		      && ! (matching_operands[opno] == eopno
2751 			    && operands_match_p (recog_data.operand[opno],
2752 						 recog_data.operand[eopno]))
2753 		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2754 						   recog_data.operand[eopno]))
2755 		    lose = 1;
2756 
2757 	  if (! lose)
2758 	    {
2759 	      while (--funny_match_index >= 0)
2760 		{
2761 		  recog_data.operand[funny_match[funny_match_index].other]
2762 		    = recog_data.operand[funny_match[funny_match_index].this_op];
2763 		}
2764 
2765 #ifdef AUTO_INC_DEC
2766 	      /* For operands without < or > constraints, reject side-effects.  */
2767 	      if (recog_data.is_asm)
2768 		{
2769 		  for (opno = 0; opno < recog_data.n_operands; opno++)
2770 		    if (MEM_P (recog_data.operand[opno]))
2771 		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2772 			{
2773 			case PRE_INC:
2774 			case POST_INC:
2775 			case PRE_DEC:
2776 			case POST_DEC:
2777 			case PRE_MODIFY:
2778 			case POST_MODIFY:
2779 			  if (strchr (recog_data.constraints[opno], '<') == NULL
2780 			      && strchr (recog_data.constraints[opno], '>')
2781 				 == NULL)
2782 			    return 0;
2783 			  break;
2784 			default:
2785 			  break;
2786 			}
2787 		}
2788 #endif
2789 	      return 1;
2790 	    }
2791 	}
2792 
2793       which_alternative++;
2794     }
2795   while (which_alternative < recog_data.n_alternatives);
2796 
2797   which_alternative = -1;
2798   /* If we are about to reject this, but we are not to test strictly,
2799      try a very loose test.  Only return failure if it fails also.  */
2800   if (strict == 0)
2801     return constrain_operands (-1);
2802   else
2803     return 0;
2804 }
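
/* Editor's note: an illustrative sketch, not part of the original file.
   It mirrors how extract_constrain_insn_cached above drives this
   function: STRICT follows reload_completed.  */
#if 0
static void
example_constrain (rtx insn)
{
  extract_insn (insn);
  /* Strict checking after reload; before reload, a failed strict pass
     is retried loosely (constrain_operands recurses with -1 itself).  */
  if (!constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
  /* which_alternative now records the matched alternative.  */
}
#endif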
2805 
2806 /* Return true iff OPERAND (assumed to be a REG rtx)
2807    is a hard reg in class CLASS when its regno is offset by OFFSET
2808    and changed to mode MODE.
2809    If REG occupies multiple hard regs, all of them must be in CLASS.  */
2810 
2811 bool
2812 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2813 		  enum machine_mode mode)
2814 {
2815   unsigned int regno = REGNO (operand);
2816 
2817   if (cl == NO_REGS)
2818     return false;
2819 
2820   /* Regno must not be a pseudo register.  Offset may be negative.  */
2821   return (HARD_REGISTER_NUM_P (regno)
2822 	  && HARD_REGISTER_NUM_P (regno + offset)
2823 	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2824 				regno + offset));
2825 }
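
/* Editor's note: an illustrative sketch, not part of the original file.
   The register number is hypothetical, and the two-hard-regs remark
   assumes a 32-bit word size.  */
#if 0
static bool
example_reg_fits (void)
{
  rtx reg = gen_rtx_REG (DImode, 2);
  /* True only if every hard reg occupied (regs 2 and 3 when DImode
     spans two words) is in GENERAL_REGS.  */
  return reg_fits_class_p (reg, GENERAL_REGS, 0, DImode);
}
#endif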
2826 
2827 /* Split a single instruction.  Helper function for split_all_insns and
2828    split_all_insns_noflow.  Return the last insn in the sequence if
2829    successful, or NULL if unsuccessful.  */
2830 
2831 static rtx
2832 split_insn (rtx insn)
2833 {
2834   /* Split insns here to get max fine-grain parallelism.  */
2835   rtx first = PREV_INSN (insn);
2836   rtx last = try_split (PATTERN (insn), insn, 1);
2837   rtx insn_set, last_set, note;
2838 
2839   if (last == insn)
2840     return NULL_RTX;
2841 
2842   /* If the original instruction was a single set that was known to be
2843      equivalent to a constant, see if we can say the same about the last
2844      instruction in the split sequence.  The two instructions must set
2845      the same destination.  */
2846   insn_set = single_set (insn);
2847   if (insn_set)
2848     {
2849       last_set = single_set (last);
2850       if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2851 	{
2852 	  note = find_reg_equal_equiv_note (insn);
2853 	  if (note && CONSTANT_P (XEXP (note, 0)))
2854 	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2855 	  else if (CONSTANT_P (SET_SRC (insn_set)))
2856 	    set_unique_reg_note (last, REG_EQUAL,
2857 				 copy_rtx (SET_SRC (insn_set)));
2858 	}
2859     }
2860 
2861   /* try_split returns the NOTE that INSN became.  */
2862   SET_INSN_DELETED (insn);
2863 
2864   /* ??? Coddle to md files that generate subregs in post-reload
2865      splitters instead of computing the proper hard register.  */
2866   if (reload_completed && first != last)
2867     {
2868       first = NEXT_INSN (first);
2869       for (;;)
2870 	{
2871 	  if (INSN_P (first))
2872 	    cleanup_subreg_operands (first);
2873 	  if (first == last)
2874 	    break;
2875 	  first = NEXT_INSN (first);
2876 	}
2877     }
2878 
2879   return last;
2880 }
2881 
2882 /* Split all insns in the function.  */
2883 
2884 void
2885 split_all_insns (void)
2886 {
2887   sbitmap blocks;
2888   bool changed;
2889   basic_block bb;
2890 
2891   blocks = sbitmap_alloc (last_basic_block);
2892   bitmap_clear (blocks);
2893   changed = false;
2894 
2895   FOR_EACH_BB_REVERSE (bb)
2896     {
2897       rtx insn, next;
2898       bool finish = false;
2899 
2900       rtl_profile_for_bb (bb);
2901       for (insn = BB_HEAD (bb); !finish ; insn = next)
2902 	{
2903 	  /* Can't use `next_real_insn', because that might go across
2904 	     CODE_LABELs and short-circuit basic blocks.  */
2905 	  next = NEXT_INSN (insn);
2906 	  finish = (insn == BB_END (bb));
2907 	  if (INSN_P (insn))
2908 	    {
2909 	      rtx set = single_set (insn);
2910 
2911 	      /* Don't split no-op move insns.  These should silently
2912 		 disappear later in final.  Splitting such insns would
2913 		 break the code that handles LIBCALL blocks.  */
2914 	      if (set && set_noop_p (set))
2915 		{
2916 		  /* Nops get in the way while scheduling, so delete them
2917 		     now if register allocation has already been done.  It
2918 		     is too risky to try to do this before register
2919 		     allocation, and there are unlikely to be very many
2920 		     nops then anyway.  */
2921 		  if (reload_completed)
2922 		      delete_insn_and_edges (insn);
2923 		}
2924 	      else
2925 		{
2926 		  if (split_insn (insn))
2927 		    {
2928 		      bitmap_set_bit (blocks, bb->index);
2929 		      changed = true;
2930 		    }
2931 		}
2932 	    }
2933 	}
2934     }
2935 
2936   default_rtl_profile ();
2937   if (changed)
2938     find_many_sub_basic_blocks (blocks);
2939 
2940 #ifdef ENABLE_CHECKING
2941   verify_flow_info ();
2942 #endif
2943 
2944   sbitmap_free (blocks);
2945 }
2946 
2947 /* Same as split_all_insns, but do not expect CFG to be available.
2948    Used by machine dependent reorg passes.  */
2949 
2950 unsigned int
2951 split_all_insns_noflow (void)
2952 {
2953   rtx next, insn;
2954 
2955   for (insn = get_insns (); insn; insn = next)
2956     {
2957       next = NEXT_INSN (insn);
2958       if (INSN_P (insn))
2959 	{
2960 	  /* Don't split no-op move insns.  These should silently
2961 	     disappear later in final.  Splitting such insns would
2962 	     break the code that handles LIBCALL blocks.  */
2963 	  rtx set = single_set (insn);
2964 	  if (set && set_noop_p (set))
2965 	    {
2966 	      /* Nops get in the way while scheduling, so delete them
2967 		 now if register allocation has already been done.  It
2968 		 is too risky to try to do this before register
2969 		 allocation, and there are unlikely to be very many
2970 		 nops then anyway.
2971 
2972 		 ??? Should we use delete_insn when the CFG isn't valid?  */
2973 	      if (reload_completed)
2974 		delete_insn_and_edges (insn);
2975 	    }
2976 	  else
2977 	    split_insn (insn);
2978 	}
2979     }
2980   return 0;
2981 }
2982 
2983 #ifdef HAVE_peephole2
2984 struct peep2_insn_data
2985 {
2986   rtx insn;
2987   regset live_before;
2988 };
2989 
2990 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2991 static int peep2_current;
2992 
2993 static bool peep2_do_rebuild_jump_labels;
2994 static bool peep2_do_cleanup_cfg;
2995 
2996 /* The number of instructions available to match a peep2.  */
2997 int peep2_current_count;
2998 
2999 /* A non-insn marker indicating the last insn of the block.
3000    The live_before regset for this element is correct, indicating
3001    DF_LIVE_OUT for the block.  */
3002 #define PEEP2_EOB	pc_rtx
3003 
3004 /* Wrap N to fit into the peep2_insn_data buffer.  */
3005 
3006 static int
3007 peep2_buf_position (int n)
3008 {
3009   if (n >= MAX_INSNS_PER_PEEP2 + 1)
3010     n -= MAX_INSNS_PER_PEEP2 + 1;
3011   return n;
3012 }
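
/* Editor's note: illustrative arithmetic, not part of the original file.
   The buffer is used circularly with MAX_INSNS_PER_PEEP2 + 1 slots; a
   single conditional subtraction suffices because callers never advance
   by more than one full lap.  Assuming MAX_INSNS_PER_PEEP2 is 25
   (26 slots):  */
#if 0
static void
example_buf_position (void)
{
  int a = peep2_buf_position (25);  /* == 25, no wrap */
  int b = peep2_buf_position (26);  /* == 0, wrapped */
  int c = peep2_buf_position (27);  /* == 1 */
}
#endif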
3013 
3014 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3015    does not exist.  Used by the recognizer to find the next insn to match
3016    in a multi-insn pattern.  */
3017 
3018 rtx
3019 peep2_next_insn (int n)
3020 {
3021   gcc_assert (n <= peep2_current_count);
3022 
3023   n = peep2_buf_position (peep2_current + n);
3024 
3025   return peep2_insn_data[n].insn;
3026 }
3027 
3028 /* Return true if REGNO is dead before the Nth non-note insn
3029    after `current'.  */
3030 
3031 int
3032 peep2_regno_dead_p (int ofs, int regno)
3033 {
3034   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3035 
3036   ofs = peep2_buf_position (peep2_current + ofs);
3037 
3038   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3039 
3040   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3041 }
3042 
3043 /* Similarly for a REG.  */
3044 
3045 int
3046 peep2_reg_dead_p (int ofs, rtx reg)
3047 {
3048   int regno, n;
3049 
3050   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3051 
3052   ofs = peep2_buf_position (peep2_current + ofs);
3053 
3054   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3055 
3056   regno = REGNO (reg);
3057   n = hard_regno_nregs[regno][GET_MODE (reg)];
3058   while (--n >= 0)
3059     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3060       return 0;
3061   return 1;
3062 }
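
/* Editor's note: an illustrative sketch, not part of the original file.
   These predicates are normally called from the C condition of a
   define_peephole2 in a machine description; the pattern below is
   hypothetical:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
	     (match_operand:SI 1 "register_operand" ""))]
       "peep2_reg_dead_p (1, operands[1])"
       [...])

   Here operand 1 must be dead before the insn at window offset 1,
   i.e. immediately after the single matched insn.  */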
3063 
3064 /* Regno offset to be used in the register search.  */
3065 static int search_ofs;
3066 
3067 /* Try to find a hard register of mode MODE, matching the register class in
3068    CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3069    remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
3070    in which case the only condition is that the register must be available
3071    before CURRENT_INSN.
3072    Registers that already have bits set in REG_SET will not be considered.
3073 
3074    If an appropriate register is available, it will be returned and the
3075    corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3076    returned.  */
3077 
3078 rtx
3079 peep2_find_free_register (int from, int to, const char *class_str,
3080 			  enum machine_mode mode, HARD_REG_SET *reg_set)
3081 {
3082   enum reg_class cl;
3083   HARD_REG_SET live;
3084   df_ref *def_rec;
3085   int i;
3086 
3087   gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3088   gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3089 
3090   from = peep2_buf_position (peep2_current + from);
3091   to = peep2_buf_position (peep2_current + to);
3092 
3093   gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3094   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3095 
3096   while (from != to)
3097     {
3098       gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3099 
3100       /* Don't use registers set or clobbered by the insn.  */
3101       for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3102 	   *def_rec; def_rec++)
3103 	SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3104 
3105       from = peep2_buf_position (from + 1);
3106     }
3107 
3108   cl = (class_str[0] == 'r' ? GENERAL_REGS
3109 	   : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3110 
3111   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3112     {
3113       int raw_regno, regno, success, j;
3114 
3115       /* Distribute the free registers as much as possible.  */
3116       raw_regno = search_ofs + i;
3117       if (raw_regno >= FIRST_PSEUDO_REGISTER)
3118 	raw_regno -= FIRST_PSEUDO_REGISTER;
3119 #ifdef REG_ALLOC_ORDER
3120       regno = reg_alloc_order[raw_regno];
3121 #else
3122       regno = raw_regno;
3123 #endif
3124 
3125       /* Can it support the mode we need?  */
3126       if (! HARD_REGNO_MODE_OK (regno, mode))
3127 	continue;
3128 
3129       success = 1;
3130       for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3131 	{
3132 	  /* Don't allocate fixed registers.  */
3133 	  if (fixed_regs[regno + j])
3134 	    {
3135 	      success = 0;
3136 	      break;
3137 	    }
3138 	  /* Don't allocate global registers.  */
3139 	  if (global_regs[regno + j])
3140 	    {
3141 	      success = 0;
3142 	      break;
3143 	    }
3144 	  /* Make sure the register is of the right class.  */
3145 	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3146 	    {
3147 	      success = 0;
3148 	      break;
3149 	    }
3150 	  /* And that we don't create an extra save/restore.  */
3151 	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3152 	    {
3153 	      success = 0;
3154 	      break;
3155 	    }
3156 
3157 	  if (! targetm.hard_regno_scratch_ok (regno + j))
3158 	    {
3159 	      success = 0;
3160 	      break;
3161 	    }
3162 
3163 	  /* And we don't clobber traceback for noreturn functions.  */
3164 	  if ((regno + j == FRAME_POINTER_REGNUM
3165 	       || regno + j == HARD_FRAME_POINTER_REGNUM)
3166 	      && (! reload_completed || frame_pointer_needed))
3167 	    {
3168 	      success = 0;
3169 	      break;
3170 	    }
3171 
3172 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3173 	      || TEST_HARD_REG_BIT (live, regno + j))
3174 	    {
3175 	      success = 0;
3176 	      break;
3177 	    }
3178 	}
3179 
3180       if (success)
3181 	{
3182 	  add_to_hard_reg_set (reg_set, mode, regno);
3183 
3184 	  /* Start the next search with the next register.  */
3185 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3186 	    raw_regno = 0;
3187 	  search_ofs = raw_regno;
3188 
3189 	  return gen_rtx_REG (mode, regno);
3190 	}
3191     }
3192 
3193   search_ofs = 0;
3194   return NULL_RTX;
3195 }
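
/* Editor's note: an illustrative sketch, not part of the original file.
   A typical fragment from the C body of a define_peephole2 replacement
   that needs a scratch register (FAIL is only meaningful in that md
   context); all names are hypothetical.  */
#if 0
  HARD_REG_SET used;

  CLEAR_HARD_REG_SET (used);
  /* A general register free from matched insn 0 through insn 1;
     NULL_RTX means none is available and the peephole must fail.  */
  rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
  if (scratch == NULL_RTX)
    FAIL;
#endif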
3196 
3197 /* Forget all currently tracked instructions; remember only the current
3198    LIVE regset.  */
3199 
3200 static void
3201 peep2_reinit_state (regset live)
3202 {
3203   int i;
3204 
3205   /* Indicate that all slots except the last hold invalid data.  */
3206   for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3207     peep2_insn_data[i].insn = NULL_RTX;
3208   peep2_current_count = 0;
3209 
3210   /* Indicate that the last slot contains live_after data.  */
3211   peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3212   peep2_current = MAX_INSNS_PER_PEEP2;
3213 
3214   COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3215 }
3216 
3217 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3218    starting at INSN.  Perform the replacement, removing the old insns and
3219    replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
3220    if the replacement is rejected.  */
3221 
3222 static rtx
3223 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3224 {
3225   int i;
3226   rtx last, eh_note, as_note, before_try, x;
3227   rtx old_insn, new_insn;
3228   bool was_call = false;
3229 
3230   /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3231      match more than one insn, or to be split into more than one insn.  */
3232   old_insn = peep2_insn_data[peep2_current].insn;
3233   if (RTX_FRAME_RELATED_P (old_insn))
3234     {
3235       bool any_note = false;
3236       rtx note;
3237 
3238       if (match_len != 0)
3239 	return NULL;
3240 
3241       /* Look for one "active" insn, i.e. ignore any "clobber" insns that
3242 	 may be in the stream for the purpose of register allocation.  */
3243       if (active_insn_p (attempt))
3244 	new_insn = attempt;
3245       else
3246 	new_insn = next_active_insn (attempt);
3247       if (next_active_insn (new_insn))
3248 	return NULL;
3249 
3250       /* We have a 1-1 replacement.  Copy over any frame-related info.  */
3251       RTX_FRAME_RELATED_P (new_insn) = 1;
3252 
3253       /* Allow the backend to fill in a note during the split.  */
3254       for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3255 	switch (REG_NOTE_KIND (note))
3256 	  {
3257 	  case REG_FRAME_RELATED_EXPR:
3258 	  case REG_CFA_DEF_CFA:
3259 	  case REG_CFA_ADJUST_CFA:
3260 	  case REG_CFA_OFFSET:
3261 	  case REG_CFA_REGISTER:
3262 	  case REG_CFA_EXPRESSION:
3263 	  case REG_CFA_RESTORE:
3264 	  case REG_CFA_SET_VDRAP:
3265 	    any_note = true;
3266 	    break;
3267 	  default:
3268 	    break;
3269 	  }
3270 
3271       /* If the backend didn't supply a note, copy one over.  */
3272       if (!any_note)
3273         for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3274 	  switch (REG_NOTE_KIND (note))
3275 	    {
3276 	    case REG_FRAME_RELATED_EXPR:
3277 	    case REG_CFA_DEF_CFA:
3278 	    case REG_CFA_ADJUST_CFA:
3279 	    case REG_CFA_OFFSET:
3280 	    case REG_CFA_REGISTER:
3281 	    case REG_CFA_EXPRESSION:
3282 	    case REG_CFA_RESTORE:
3283 	    case REG_CFA_SET_VDRAP:
3284 	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3285 	      any_note = true;
3286 	      break;
3287 	    default:
3288 	      break;
3289 	    }
3290 
3291       /* If there still isn't a note, make sure the unwind info sees the
3292 	 same expression as before the split.  */
3293       if (!any_note)
3294 	{
3295 	  rtx old_set, new_set;
3296 
3297 	  /* The old insn had better have been simple, or annotated.  */
3298 	  old_set = single_set (old_insn);
3299 	  gcc_assert (old_set != NULL);
3300 
3301 	  new_set = single_set (new_insn);
3302 	  if (!new_set || !rtx_equal_p (new_set, old_set))
3303 	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3304 	}
3305 
3306       /* Copy prologue/epilogue status.  This is required in order to keep
3307 	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
3308       maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3309     }
3310 
3311   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3312      in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3313      cfg-related call notes.  */
3314   for (i = 0; i <= match_len; ++i)
3315     {
3316       int j;
3317       rtx note;
3318 
3319       j = peep2_buf_position (peep2_current + i);
3320       old_insn = peep2_insn_data[j].insn;
3321       if (!CALL_P (old_insn))
3322 	continue;
3323       was_call = true;
3324 
3325       new_insn = attempt;
3326       while (new_insn != NULL_RTX)
3327 	{
3328 	  if (CALL_P (new_insn))
3329 	    break;
3330 	  new_insn = NEXT_INSN (new_insn);
3331 	}
3332 
3333       gcc_assert (new_insn != NULL_RTX);
3334 
3335       CALL_INSN_FUNCTION_USAGE (new_insn)
3336 	= CALL_INSN_FUNCTION_USAGE (old_insn);
3337 
3338       for (note = REG_NOTES (old_insn);
3339 	   note;
3340 	   note = XEXP (note, 1))
3341 	switch (REG_NOTE_KIND (note))
3342 	  {
3343 	  case REG_NORETURN:
3344 	  case REG_SETJMP:
3345 	  case REG_TM:
3346 	    add_reg_note (new_insn, REG_NOTE_KIND (note),
3347 			  XEXP (note, 0));
3348 	    break;
3349 	  default:
3350 	    /* Discard all other reg notes.  */
3351 	    break;
3352 	  }
3353 
3354       /* Croak if there is another call in the sequence.  */
3355       while (++i <= match_len)
3356 	{
3357 	  j = peep2_buf_position (peep2_current + i);
3358 	  old_insn = peep2_insn_data[j].insn;
3359 	  gcc_assert (!CALL_P (old_insn));
3360 	}
3361       break;
3362     }
3363 
3364   /* If we matched any instruction that had a REG_ARGS_SIZE, then
3365      move those notes over to the new sequence.  */
3366   as_note = NULL;
3367   for (i = match_len; i >= 0; --i)
3368     {
3369       int j = peep2_buf_position (peep2_current + i);
3370       old_insn = peep2_insn_data[j].insn;
3371 
3372       as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3373       if (as_note)
3374 	break;
3375     }
3376 
3377   i = peep2_buf_position (peep2_current + match_len);
3378   eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3379 
3380   /* Replace the old sequence with the new.  */
3381   last = emit_insn_after_setloc (attempt,
3382 				 peep2_insn_data[i].insn,
3383 				 INSN_LOCATION (peep2_insn_data[i].insn));
3384   before_try = PREV_INSN (insn);
3385   delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3386 
3387   /* Re-insert the EH_REGION notes.  */
3388   if (eh_note || (was_call && nonlocal_goto_handler_labels))
3389     {
3390       edge eh_edge;
3391       edge_iterator ei;
3392 
3393       FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3394 	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3395 	  break;
3396 
3397       if (eh_note)
3398 	copy_reg_eh_region_note_backward (eh_note, last, before_try);
3399 
3400       if (eh_edge)
3401 	for (x = last; x != before_try; x = PREV_INSN (x))
3402 	  if (x != BB_END (bb)
3403 	      && (can_throw_internal (x)
3404 		  || can_nonlocal_goto (x)))
3405 	    {
3406 	      edge nfte, nehe;
3407 	      int flags;
3408 
3409 	      nfte = split_block (bb, x);
3410 	      flags = (eh_edge->flags
3411 		       & (EDGE_EH | EDGE_ABNORMAL));
3412 	      if (CALL_P (x))
3413 		flags |= EDGE_ABNORMAL_CALL;
3414 	      nehe = make_edge (nfte->src, eh_edge->dest,
3415 				flags);
3416 
3417 	      nehe->probability = eh_edge->probability;
3418 	      nfte->probability
3419 		= REG_BR_PROB_BASE - nehe->probability;
3420 
3421 	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3422 	      bb = nfte->src;
3423 	      eh_edge = nehe;
3424 	    }
3425 
3426       /* The replacement may have turned a trapping insn into a
3427 	 non-trapping one.  Zap any outgoing edges that became dummies.  */
3428       peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3429     }
3430 
3431   /* Re-insert the ARGS_SIZE notes.  */
3432   if (as_note)
3433     fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3434 
3435   /* If we generated a jump instruction, it won't have
3436      JUMP_LABEL set.  Recompute after we're done.  */
3437   for (x = last; x != before_try; x = PREV_INSN (x))
3438     if (JUMP_P (x))
3439       {
3440 	peep2_do_rebuild_jump_labels = true;
3441 	break;
3442       }
3443 
3444   return last;
3445 }
3446 
3447 /* After performing a replacement in basic block BB, fix up the life
3448    information in our buffer.  LAST is the last of the insns that we
3449    emitted as a replacement.  PREV is the insn before the start of
3450    the replacement.  MATCH_LEN is the number of instructions that were
3451    matched, and which now need to be replaced in the buffer.  */
3452 
3453 static void
3454 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3455 {
3456   int i = peep2_buf_position (peep2_current + match_len + 1);
3457   rtx x;
3458   regset_head live;
3459 
3460   INIT_REG_SET (&live);
3461   COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3462 
3463   gcc_assert (peep2_current_count >= match_len + 1);
3464   peep2_current_count -= match_len + 1;
3465 
3466   x = last;
3467   do
3468     {
3469       if (INSN_P (x))
3470 	{
3471 	  df_insn_rescan (x);
3472 	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3473 	    {
3474 	      peep2_current_count++;
3475 	      if (--i < 0)
3476 		i = MAX_INSNS_PER_PEEP2;
3477 	      peep2_insn_data[i].insn = x;
3478 	      df_simulate_one_insn_backwards (bb, x, &live);
3479 	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3480 	    }
3481 	}
3482       x = PREV_INSN (x);
3483     }
3484   while (x != prev);
3485   CLEAR_REG_SET (&live);
3486 
3487   peep2_current = i;
3488 }
3489 
3490 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3491    Return true if we added it, false otherwise.  The caller will try to match
3492    peepholes against the buffer if we return false; otherwise it will try to
3493    add more instructions to the buffer.  */
3494 
3495 static bool
3496 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3497 {
3498   int pos;
3499 
3500   /* Once we have filled the maximum number of insns the buffer can hold,
3501      allow the caller to match the insns against peepholes.  We wait until
3502      the buffer is full in case the target has similar peepholes of different
3503      length; we always want to match the longest if possible.  */
3504   if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3505     return false;
3506 
3507   /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3508      any other pattern, lest it change the semantics of the frame info.  */
3509   if (RTX_FRAME_RELATED_P (insn))
3510     {
3511       /* Let the buffer drain first.  */
3512       if (peep2_current_count > 0)
3513 	return false;
3514       /* Now the insn will be the only thing in the buffer.  */
3515     }
3516 
3517   pos = peep2_buf_position (peep2_current + peep2_current_count);
3518   peep2_insn_data[pos].insn = insn;
3519   COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3520   peep2_current_count++;
3521 
3522   df_simulate_one_insn_forwards (bb, insn, live);
3523   return true;
3524 }
3525 
3526 /* Perform the peephole2 optimization pass.  */
3527 
3528 static void
3529 peephole2_optimize (void)
3530 {
3531   rtx insn;
3532   bitmap live;
3533   int i;
3534   basic_block bb;
3535 
3536   peep2_do_cleanup_cfg = false;
3537   peep2_do_rebuild_jump_labels = false;
3538 
3539   df_set_flags (DF_LR_RUN_DCE);
3540   df_note_add_problem ();
3541   df_analyze ();
3542 
3543   /* Initialize the regsets we're going to use.  */
3544   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3545     peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3546   search_ofs = 0;
3547   live = BITMAP_ALLOC (&reg_obstack);
3548 
3549   FOR_EACH_BB_REVERSE (bb)
3550     {
3551       bool past_end = false;
3552       int pos;
3553 
3554       rtl_profile_for_bb (bb);
3555 
3556       /* Start up propagation.  */
3557       bitmap_copy (live, DF_LR_IN (bb));
3558       df_simulate_initialize_forwards (bb, live);
3559       peep2_reinit_state (live);
3560 
3561       insn = BB_HEAD (bb);
3562       for (;;)
3563 	{
3564 	  rtx attempt, head;
3565 	  int match_len;
3566 
3567 	  if (!past_end && !NONDEBUG_INSN_P (insn))
3568 	    {
3569 	    next_insn:
3570 	      insn = NEXT_INSN (insn);
3571 	      if (insn == NEXT_INSN (BB_END (bb)))
3572 		past_end = true;
3573 	      continue;
3574 	    }
3575 	  if (!past_end && peep2_fill_buffer (bb, insn, live))
3576 	    goto next_insn;
3577 
3578 	  /* If we did not fill an empty buffer, it signals the end of the
3579 	     block.  */
3580 	  if (peep2_current_count == 0)
3581 	    break;
3582 
3583 	  /* The buffer filled to the current maximum, so try to match.  */
3584 
3585 	  pos = peep2_buf_position (peep2_current + peep2_current_count);
3586 	  peep2_insn_data[pos].insn = PEEP2_EOB;
3587 	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3588 
3589 	  /* Match the peephole.  */
3590 	  head = peep2_insn_data[peep2_current].insn;
3591 	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
3592 	  if (attempt != NULL)
3593 	    {
3594 	      rtx last = peep2_attempt (bb, head, match_len, attempt);
3595 	      if (last)
3596 		{
3597 		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3598 		  continue;
3599 		}
3600 	    }
3601 
3602 	  /* No match: advance the buffer by one insn.  */
3603 	  peep2_current = peep2_buf_position (peep2_current + 1);
3604 	  peep2_current_count--;
3605 	}
3606     }
3607 
3608   default_rtl_profile ();
3609   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3610     BITMAP_FREE (peep2_insn_data[i].live_before);
3611   BITMAP_FREE (live);
3612   if (peep2_do_rebuild_jump_labels)
3613     rebuild_jump_labels (get_insns ());
3614 }
3615 #endif /* HAVE_peephole2 */
3616 
3617 /* Common predicates for use with define_bypass.  */
3618 
3619 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3620    data, not on the address operand(s) of the store.  IN_INSN and OUT_INSN
3621    must be either a single_set or a PARALLEL with SETs inside.  */
3622 
3623 int
3624 store_data_bypass_p (rtx out_insn, rtx in_insn)
3625 {
3626   rtx out_set, in_set;
3627   rtx out_pat, in_pat;
3628   rtx out_exp, in_exp;
3629   int i, j;
3630 
3631   in_set = single_set (in_insn);
3632   if (in_set)
3633     {
3634       if (!MEM_P (SET_DEST (in_set)))
3635 	return false;
3636 
3637       out_set = single_set (out_insn);
3638       if (out_set)
3639         {
3640           if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3641             return false;
3642         }
3643       else
3644         {
3645           out_pat = PATTERN (out_insn);
3646 
3647           if (GET_CODE (out_pat) != PARALLEL)
3648             return false;
3649 
3650           for (i = 0; i < XVECLEN (out_pat, 0); i++)
3651             {
3652               out_exp = XVECEXP (out_pat, 0, i);
3653 
3654               if (GET_CODE (out_exp) == CLOBBER)
3655                 continue;
3656 
3657               gcc_assert (GET_CODE (out_exp) == SET);
3658 
3659               if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3660                 return false;
3661             }
3662         }
3663     }
3664   else
3665     {
3666       in_pat = PATTERN (in_insn);
3667       gcc_assert (GET_CODE (in_pat) == PARALLEL);
3668 
3669       for (i = 0; i < XVECLEN (in_pat, 0); i++)
3670 	{
3671 	  in_exp = XVECEXP (in_pat, 0, i);
3672 
3673 	  if (GET_CODE (in_exp) == CLOBBER)
3674 	    continue;
3675 
3676 	  gcc_assert (GET_CODE (in_exp) == SET);
3677 
3678 	  if (!MEM_P (SET_DEST (in_exp)))
3679 	    return false;
3680 
3681           out_set = single_set (out_insn);
3682           if (out_set)
3683             {
3684               if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3685                 return false;
3686             }
3687           else
3688             {
3689               out_pat = PATTERN (out_insn);
3690               gcc_assert (GET_CODE (out_pat) == PARALLEL);
3691 
3692               for (j = 0; j < XVECLEN (out_pat, 0); j++)
3693                 {
3694                   out_exp = XVECEXP (out_pat, 0, j);
3695 
3696                   if (GET_CODE (out_exp) == CLOBBER)
3697                     continue;
3698 
3699                   gcc_assert (GET_CODE (out_exp) == SET);
3700 
3701                   if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3702                     return false;
3703                 }
3704             }
3705         }
3706     }
3707 
3708   return true;
3709 }
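
/* A machine description would typically wire store_data_bypass_p into
   a define_bypass.  A sketch only; the unit names and the latency are
   hypothetical:

     (define_bypass 1 "cpu_alu" "cpu_store" "store_data_bypass_p")

   i.e. an ALU result reaches a dependent store in one cycle when the
   store consumes it only as the data being stored, not as part of the
   address.  */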
3710 
3711 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3712    condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
3713    set or multiple sets; IN_INSN must be a single_set for the result to hold,
3714    but for convenience of insn categorization it may be any JUMP or CALL insn.  */
3715 
3716 int
3717 if_test_bypass_p (rtx out_insn, rtx in_insn)
3718 {
3719   rtx out_set, in_set;
3720 
3721   in_set = single_set (in_insn);
3722   if (! in_set)
3723     {
3724       gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3725       return false;
3726     }
3727 
3728   if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3729     return false;
3730   in_set = SET_SRC (in_set);
3731 
3732   out_set = single_set (out_insn);
3733   if (out_set)
3734     {
3735       if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3736 	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3737 	return false;
3738     }
3739   else
3740     {
3741       rtx out_pat;
3742       int i;
3743 
3744       out_pat = PATTERN (out_insn);
3745       gcc_assert (GET_CODE (out_pat) == PARALLEL);
3746 
3747       for (i = 0; i < XVECLEN (out_pat, 0); i++)
3748 	{
3749 	  rtx exp = XVECEXP (out_pat, 0, i);
3750 
3751 	  if (GET_CODE (exp) == CLOBBER)
3752 	    continue;
3753 
3754 	  gcc_assert (GET_CODE (exp) == SET);
3755 
3756 	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3757 	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3758 	    return false;
3759 	}
3760     }
3761 
3762   return true;
3763 }
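
/* For illustration, IN_INSN might be a conditional move (register
   numbers and modes chosen arbitrarily):

     (set (reg:SI 0)
          (if_then_else:SI (eq (reg:CC 17) (const_int 0))
                           (reg:SI 1)
                           (reg:SI 2)))

   The predicate returns true only when OUT_INSN's destination feeds at
   most the tested condition (operand 0 of the IF_THEN_ELSE) and neither
   arm (operands 1 and 2).  */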
3764 
3765 static bool
3766 gate_handle_peephole2 (void)
3767 {
3768   return (optimize > 0 && flag_peephole2);
3769 }
3770 
3771 static unsigned int
3772 rest_of_handle_peephole2 (void)
3773 {
3774 #ifdef HAVE_peephole2
3775   peephole2_optimize ();
3776 #endif
3777   return 0;
3778 }
3779 
3780 struct rtl_opt_pass pass_peephole2 =
3781 {
3782  {
3783   RTL_PASS,
3784   "peephole2",                          /* name */
3785   OPTGROUP_NONE,                        /* optinfo_flags */
3786   gate_handle_peephole2,                /* gate */
3787   rest_of_handle_peephole2,             /* execute */
3788   NULL,                                 /* sub */
3789   NULL,                                 /* next */
3790   0,                                    /* static_pass_number */
3791   TV_PEEPHOLE2,                         /* tv_id */
3792   0,                                    /* properties_required */
3793   0,                                    /* properties_provided */
3794   0,                                    /* properties_destroyed */
3795   0,                                    /* todo_flags_start */
3796   TODO_df_finish | TODO_verify_rtl_sharing |
3797   0                                    /* todo_flags_finish */
3798  }
3799 };
3800 
3801 static unsigned int
3802 rest_of_handle_split_all_insns (void)
3803 {
3804   split_all_insns ();
3805   return 0;
3806 }
3807 
3808 struct rtl_opt_pass pass_split_all_insns =
3809 {
3810  {
3811   RTL_PASS,
3812   "split1",                             /* name */
3813   OPTGROUP_NONE,                        /* optinfo_flags */
3814   NULL,                                 /* gate */
3815   rest_of_handle_split_all_insns,       /* execute */
3816   NULL,                                 /* sub */
3817   NULL,                                 /* next */
3818   0,                                    /* static_pass_number */
3819   TV_NONE,                              /* tv_id */
3820   0,                                    /* properties_required */
3821   0,                                    /* properties_provided */
3822   0,                                    /* properties_destroyed */
3823   0,                                    /* todo_flags_start */
3824   0                                     /* todo_flags_finish */
3825  }
3826 };
3827 
3828 static unsigned int
3829 rest_of_handle_split_after_reload (void)
3830 {
3831   /* If optimizing, then go ahead and split insns now.  */
3832 #ifndef STACK_REGS
3833   if (optimize > 0)
3834 #endif
3835     split_all_insns ();
3836   return 0;
3837 }
3838 
3839 struct rtl_opt_pass pass_split_after_reload =
3840 {
3841  {
3842   RTL_PASS,
3843   "split2",                             /* name */
3844   OPTGROUP_NONE,                        /* optinfo_flags */
3845   NULL,                                 /* gate */
3846   rest_of_handle_split_after_reload,    /* execute */
3847   NULL,                                 /* sub */
3848   NULL,                                 /* next */
3849   0,                                    /* static_pass_number */
3850   TV_NONE,                              /* tv_id */
3851   0,                                    /* properties_required */
3852   0,                                    /* properties_provided */
3853   0,                                    /* properties_destroyed */
3854   0,                                    /* todo_flags_start */
3855   0                                     /* todo_flags_finish */
3856  }
3857 };
3858 
3859 static bool
3860 gate_handle_split_before_regstack (void)
3861 {
3862 #if HAVE_ATTR_length && defined (STACK_REGS)
3863   /* If flow2 creates new instructions which need splitting, and
3864      scheduling after reload is not done, they might not be split
3865      until final, which does not allow splitting when
3866      HAVE_ATTR_length is set.  */
3867 # ifdef INSN_SCHEDULING
3868   return (optimize && !flag_schedule_insns_after_reload);
3869 # else
3870   return (optimize);
3871 # endif
3872 #else
3873   return 0;
3874 #endif
3875 }
3876 
3877 static unsigned int
3878 rest_of_handle_split_before_regstack (void)
3879 {
3880   split_all_insns ();
3881   return 0;
3882 }
3883 
3884 struct rtl_opt_pass pass_split_before_regstack =
3885 {
3886  {
3887   RTL_PASS,
3888   "split3",                             /* name */
3889   OPTGROUP_NONE,                        /* optinfo_flags */
3890   gate_handle_split_before_regstack,    /* gate */
3891   rest_of_handle_split_before_regstack, /* execute */
3892   NULL,                                 /* sub */
3893   NULL,                                 /* next */
3894   0,                                    /* static_pass_number */
3895   TV_NONE,                              /* tv_id */
3896   0,                                    /* properties_required */
3897   0,                                    /* properties_provided */
3898   0,                                    /* properties_destroyed */
3899   0,                                    /* todo_flags_start */
3900   0                                     /* todo_flags_finish */
3901  }
3902 };
3903 
3904 static bool
3905 gate_handle_split_before_sched2 (void)
3906 {
3907 #ifdef INSN_SCHEDULING
3908   return optimize > 0 && flag_schedule_insns_after_reload;
3909 #else
3910   return 0;
3911 #endif
3912 }
3913 
3914 static unsigned int
3915 rest_of_handle_split_before_sched2 (void)
3916 {
3917 #ifdef INSN_SCHEDULING
3918   split_all_insns ();
3919 #endif
3920   return 0;
3921 }
3922 
3923 struct rtl_opt_pass pass_split_before_sched2 =
3924 {
3925  {
3926   RTL_PASS,
3927   "split4",                             /* name */
3928   OPTGROUP_NONE,                        /* optinfo_flags */
3929   gate_handle_split_before_sched2,      /* gate */
3930   rest_of_handle_split_before_sched2,   /* execute */
3931   NULL,                                 /* sub */
3932   NULL,                                 /* next */
3933   0,                                    /* static_pass_number */
3934   TV_NONE,                              /* tv_id */
3935   0,                                    /* properties_required */
3936   0,                                    /* properties_provided */
3937   0,                                    /* properties_destroyed */
3938   0,                                    /* todo_flags_start */
3939   TODO_verify_flow                      /* todo_flags_finish */
3940  }
3941 };
3942 
3943 /* The placement of the splitting that we do for shorten_branches
3944    depends on whether regstack is used by the target or not.  */
3945 static bool
3946 gate_do_final_split (void)
3947 {
3948 #if HAVE_ATTR_length && !defined (STACK_REGS)
3949   return 1;
3950 #else
3951   return 0;
3952 #endif
3953 }
3954 
3955 struct rtl_opt_pass pass_split_for_shorten_branches =
3956 {
3957  {
3958   RTL_PASS,
3959   "split5",                             /* name */
3960   OPTGROUP_NONE,                        /* optinfo_flags */
3961   gate_do_final_split,                  /* gate */
3962   split_all_insns_noflow,               /* execute */
3963   NULL,                                 /* sub */
3964   NULL,                                 /* next */
3965   0,                                    /* static_pass_number */
3966   TV_NONE,                              /* tv_id */
3967   0,                                    /* properties_required */
3968   0,                                    /* properties_provided */
3969   0,                                    /* properties_destroyed */
3970   0,                                    /* todo_flags_start */
3971   TODO_verify_rtl_sharing               /* todo_flags_finish */
3972  }
3973 };
3974