xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/recog.c (revision 82d56013d7b633d116a93943de88e08335357a7c)
1 /* Subroutines used by or related to instruction recognition.
2    Copyright (C) 1987-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "cfghooks.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "insn-attr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "cfgcleanup.h"
41 #include "reload.h"
42 #include "tree-pass.h"
43 
44 #ifndef STACK_POP_CODE
45 #if STACK_GROWS_DOWNWARD
46 #define STACK_POP_CODE POST_INC
47 #else
48 #define STACK_POP_CODE POST_DEC
49 #endif
50 #endif
51 
52 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
53 static void validate_replace_src_1 (rtx *, void *);
54 static rtx_insn *split_insn (rtx_insn *);
55 
56 struct target_recog default_target_recog;
57 #if SWITCHABLE_TARGET
58 struct target_recog *this_target_recog = &default_target_recog;
59 #endif
60 
61 /* Nonzero means allow operands to be volatile.
62    This should be 0 if you are generating rtl, such as if you are calling
63    the functions in optabs.c and expmed.c (most of the time).
64    This should be 1 if all valid insns need to be recognized,
65    such as in reginfo.c and final.c and reload.c.
66 
67    init_recog and init_recog_no_volatile are responsible for setting this.  */
68 
69 int volatile_ok;
70 
71 struct recog_data_d recog_data;
72 
73 /* Contains a vector of operand_alternative structures, such that
74    operand OP of alternative A is at index A * n_operands + OP.
75    Set up by preprocess_constraints.  */
76 const operand_alternative *recog_op_alt;
77 
78 /* Used to provide recog_op_alt for asms.  */
79 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
80 				      * MAX_RECOG_ALTERNATIVES];
81 
82 /* On return from `constrain_operands', indicate which alternative
83    was satisfied.  */
84 
85 int which_alternative;
86 
87 /* Nonzero after end of reload pass.
88    Set to 1 or 0 by toplev.c.
89    Controls the significance of (SUBREG (MEM)).  */
90 
91 int reload_completed;
92 
93 /* Nonzero after thread_prologue_and_epilogue_insns has run.  */
94 int epilogue_completed;
95 
96 /* Initialize data used by the function `recog'.
97    This must be called once in the compilation of a function
98    before any insn recognition may be done in the function.  */
99 
100 void
101 init_recog_no_volatile (void)
102 {
103   volatile_ok = 0;
104 }
105 
106 void
107 init_recog (void)
108 {
109   volatile_ok = 1;
110 }
111 
112 
113 /* Return true if all labels in asm operands BODY are LABEL_REFs.  */
114 
115 static bool
116 asm_labels_ok (rtx body)
117 {
118   rtx asmop;
119   int i;
120 
121   asmop = extract_asm_operands (body);
122   if (asmop == NULL_RTX)
123     return true;
124 
125   for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
126     if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
127       return false;
128 
129   return true;
130 }
131 
132 /* Check that X is an insn-body for an `asm' with operands
133    and that the operands mentioned in it are legitimate.  */
134 
135 int
136 check_asm_operands (rtx x)
137 {
138   int noperands;
139   rtx *operands;
140   const char **constraints;
141   int i;
142 
143   if (!asm_labels_ok (x))
144     return 0;
145 
146   /* Post-reload, be more strict with things.  */
147   if (reload_completed)
148     {
149       /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
150       rtx_insn *insn = make_insn_raw (x);
151       extract_insn (insn);
152       constrain_operands (1, get_enabled_alternatives (insn));
153       return which_alternative >= 0;
154     }
155 
156   noperands = asm_noperands (x);
157   if (noperands < 0)
158     return 0;
159   if (noperands == 0)
160     return 1;
161 
162   operands = XALLOCAVEC (rtx, noperands);
163   constraints = XALLOCAVEC (const char *, noperands);
164 
165   decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
166 
167   for (i = 0; i < noperands; i++)
168     {
169       const char *c = constraints[i];
170       if (c[0] == '%')
171 	c++;
172       if (! asm_operand_ok (operands[i], c, constraints))
173 	return 0;
174     }
175 
176   return 1;
177 }
178 
179 /* Static data for the next two routines.  */
180 
181 struct change_t
182 {
183   rtx object;
184   int old_code;
185   bool unshare;
186   rtx *loc;
187   rtx old;
188 };
189 
190 static change_t *changes;
191 static int changes_allocated;
192 
193 static int num_changes = 0;
194 
195 /* Validate a proposed change to OBJECT.  LOC is the location in the rtl
196    at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
197    the change is simply made.
198 
199    Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
200    will be called with the address and mode as parameters.  If OBJECT is
201    an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
202    the change in place.
203 
204    IN_GROUP is nonzero if this is part of a group of changes that must be
205    performed as a group.  In that case, the changes will be stored.  The
206    function `apply_change_group' will validate and apply the changes.
207 
208    If IN_GROUP is zero, this is a single change.  Try to recognize the insn
209    or validate the memory reference with the change applied.  If the result
210    is not valid for the machine, suppress the change and return zero.
211    Otherwise, perform the change and return 1.  */
212 
213 static bool
214 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
215 {
216   rtx old = *loc;
217 
218   if (old == new_rtx || rtx_equal_p (old, new_rtx))
219     return 1;
220 
221   gcc_assert (in_group != 0 || num_changes == 0);
222 
223   *loc = new_rtx;
224 
225   /* Save the information describing this change.  */
226   if (num_changes >= changes_allocated)
227     {
228       if (changes_allocated == 0)
229 	/* This value allows for repeated substitutions inside complex
230 	   indexed addresses, or changes in up to 5 insns.  */
231 	changes_allocated = MAX_RECOG_OPERANDS * 5;
232       else
233 	changes_allocated *= 2;
234 
235       changes = XRESIZEVEC (change_t, changes, changes_allocated);
236     }
237 
238   changes[num_changes].object = object;
239   changes[num_changes].loc = loc;
240   changes[num_changes].old = old;
241   changes[num_changes].unshare = unshare;
242 
243   if (object && !MEM_P (object))
244     {
245       /* Set INSN_CODE to force re-recognition of the insn.  Save the old
246 	 code in case the change turns out to be invalid.  */
247       changes[num_changes].old_code = INSN_CODE (object);
248       INSN_CODE (object) = -1;
249     }
250 
251   num_changes++;
252 
253   /* If we are making a group of changes, return 1.  Otherwise, validate the
254      change group we made.  */
255 
256   if (in_group)
257     return 1;
258   else
259     return apply_change_group ();
260 }
261 
262 /* Wrapper for validate_change_1 that defaults the UNSHARE argument
263    to false.  */
264 
265 bool
266 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
267 {
268   return validate_change_1 (object, loc, new_rtx, in_group, false);
269 }
270 
271 /* Wrapper for validate_change_1 that defaults the UNSHARE argument
272    to true.  */
273 
274 bool
275 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
276 {
277   return validate_change_1 (object, loc, new_rtx, in_group, true);
278 }
279 
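/* Illustrative sketch (not part of the original file): the simplest use
   of the interface above is the IN_GROUP = 0 form, where a single change
   is either committed (and INSN re-recognized) or silently undone.
   NEW_SRC is a hypothetical replacement expression.  */

static bool ATTRIBUTE_UNUSED
example_try_single_change (rtx_insn *insn, rtx new_src)
{
  rtx set = single_set (insn);
  /* Nonzero iff INSN still matches some pattern with NEW_SRC in place;
     on failure the old SET_SRC is restored automatically.  */
  return set && validate_change (insn, &SET_SRC (set), new_src, false);
}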
280 
281 /* Keep X canonicalized if some changes have made it non-canonical; only
282    modifies the operands of X, not (for example) its code.  Simplifications
283    are not the job of this routine.
284 
285    Return true if anything was changed.  */
286 bool
287 canonicalize_change_group (rtx_insn *insn, rtx x)
288 {
289   if (COMMUTATIVE_P (x)
290       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
291     {
292       /* Oops, the caller has made X no longer canonical.
293 	 Let's redo the changes in the correct order.  */
294       rtx tem = XEXP (x, 0);
295       validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
296       validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
297       return true;
298     }
299   else
300     return false;
301 }
302 
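/* Illustrative sketch: a caller that has queued operand rewrites on a
   commutative expression X inside INSN can restore canonical operand
   order before validating the whole group.  */

static bool ATTRIBUTE_UNUSED
example_finish_commutative_edit (rtx_insn *insn, rtx x)
{
  /* May queue two more (unsharing) swaps in the current group.  */
  canonicalize_change_group (insn, x);
  /* Commit everything queued so far, or roll all of it back.  */
  return apply_change_group ();
}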
303 
304 /* This subroutine of apply_change_group verifies whether the changes to INSN
305    were valid; i.e. whether INSN can still be recognized.
306 
307    If IN_GROUP is true, any clobbers that have to be added in order to
308    make the instruction match will be added to the current change group.
309    Otherwise the changes will take effect immediately.  */
310 
311 int
312 insn_invalid_p (rtx_insn *insn, bool in_group)
313 {
314   rtx pat = PATTERN (insn);
315   int num_clobbers = 0;
316   /* If we are before reload and the pattern is a SET, see if we can add
317      clobbers.  */
318   int icode = recog (pat, insn,
319 		     (GET_CODE (pat) == SET
320 		      && ! reload_completed
321                       && ! reload_in_progress)
322 		     ? &num_clobbers : 0);
323   int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
324 
325 
326   /* If this is an asm and the operands aren't legal, then fail.  Likewise if
327      this is not an asm and the insn wasn't recognized.  */
328   if ((is_asm && ! check_asm_operands (PATTERN (insn)))
329       || (!is_asm && icode < 0))
330     return 1;
331 
332   /* If we have to add CLOBBERs, fail if we have to add ones that reference
333      hard registers since our callers can't know if they are live or not.
334      Otherwise, add them.  */
335   if (num_clobbers > 0)
336     {
337       rtx newpat;
338 
339       if (added_clobbers_hard_reg_p (icode))
340 	return 1;
341 
342       newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
343       XVECEXP (newpat, 0, 0) = pat;
344       add_clobbers (newpat, icode);
345       if (in_group)
346 	validate_change (insn, &PATTERN (insn), newpat, 1);
347       else
348 	PATTERN (insn) = pat = newpat;
349     }
350 
351   /* After reload, verify that all constraints are satisfied.  */
352   if (reload_completed)
353     {
354       extract_insn (insn);
355 
356       if (! constrain_operands (1, get_preferred_alternatives (insn)))
357 	return 1;
358     }
359 
360   INSN_CODE (insn) = icode;
361   return 0;
362 }
363 
364 /* Return number of changes made and not validated yet.  */
365 int
366 num_changes_pending (void)
367 {
368   return num_changes;
369 }
370 
371 /* Tentatively apply the changes numbered NUM and up.
372    Return 1 if all changes are valid, zero otherwise.  */
373 
374 int
375 verify_changes (int num)
376 {
377   int i;
378   rtx last_validated = NULL_RTX;
379 
380   /* The changes have been applied and all INSN_CODEs have been reset to force
381      rerecognition.
382 
383      The changes are valid if we aren't given an object, or if we are
384      given a MEM and it still is a valid address, or if this is an insn
385      and it is recognized.  In the latter case, if reload has completed,
386      we also require that the operands meet the constraints for
387      the insn.  */
388 
389   for (i = num; i < num_changes; i++)
390     {
391       rtx object = changes[i].object;
392 
393       /* If there is no object to test or if it is the same as the one we
394          already tested, ignore it.  */
395       if (object == 0 || object == last_validated)
396 	continue;
397 
398       if (MEM_P (object))
399 	{
400 	  if (! memory_address_addr_space_p (GET_MODE (object),
401 					     XEXP (object, 0),
402 					     MEM_ADDR_SPACE (object)))
403 	    break;
404 	}
405       else if (/* changes[i].old might be zero, e.g. when putting a
406 	       REG_FRAME_RELATED_EXPR into a previously empty list.  */
407 	       changes[i].old
408 	       && REG_P (changes[i].old)
409 	       && asm_noperands (PATTERN (object)) > 0
410 	       && REG_EXPR (changes[i].old) != NULL_TREE
411 	       && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (changes[i].old))
412 	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
413 	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
414 	{
415 	  /* Don't allow changes of hard register operands to inline
416 	     assemblies if they have been defined as register asm ("x").  */
417 	  break;
418 	}
419       else if (DEBUG_INSN_P (object))
420 	continue;
421       else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
422 	{
423 	  rtx pat = PATTERN (object);
424 
425 	  /* Perhaps we couldn't recognize the insn because there were
426 	     extra CLOBBERs at the end.  If so, try to re-recognize
427 	     without the last CLOBBER (later iterations will cause each of
428 	     them to be eliminated, in turn).  But don't do this if we
429 	     have an ASM_OPERAND.  */
430 	  if (GET_CODE (pat) == PARALLEL
431 	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
432 	      && asm_noperands (PATTERN (object)) < 0)
433 	    {
434 	      rtx newpat;
435 
436 	      if (XVECLEN (pat, 0) == 2)
437 		newpat = XVECEXP (pat, 0, 0);
438 	      else
439 		{
440 		  int j;
441 
442 		  newpat
443 		    = gen_rtx_PARALLEL (VOIDmode,
444 					rtvec_alloc (XVECLEN (pat, 0) - 1));
445 		  for (j = 0; j < XVECLEN (newpat, 0); j++)
446 		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
447 		}
448 
449 	      /* Add a new change to this group to replace the pattern
450 		 with this new pattern.  Then consider this change
451 		 as having succeeded.  The change we added will
452 		 cause the entire call to fail if things remain invalid.
453 
454 		 Note that this can lose if a later change than the one
455 		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
456 		 but this shouldn't occur.  */
457 
458 	      validate_change (object, &PATTERN (object), newpat, 1);
459 	      continue;
460 	    }
461 	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
462 		   || GET_CODE (pat) == VAR_LOCATION)
463 	    /* A USE, CLOBBER or VAR_LOCATION is always valid, but is
464 	       never recognized.  */
465 	    continue;
466 	  else
467 	    break;
468 	}
469       last_validated = object;
470     }
471 
472   return (i == num_changes);
473 }
474 
475 /* A group of changes has previously been issued with validate_change
476    and verified with verify_changes.  Call df_insn_rescan for each of
477    the insn changed and clear num_changes.  */
478    the insns changed and clear num_changes.  */
479 void
480 confirm_change_group (void)
481 {
482   int i;
483   rtx last_object = NULL;
484 
485   for (i = 0; i < num_changes; i++)
486     {
487       rtx object = changes[i].object;
488 
489       if (changes[i].unshare)
490 	*changes[i].loc = copy_rtx (*changes[i].loc);
491 
492       /* Avoid unnecessary rescanning when multiple changes to the same
493          instruction are made.  */
494       if (object)
495 	{
496 	  if (object != last_object && last_object && INSN_P (last_object))
497 	    df_insn_rescan (as_a <rtx_insn *> (last_object));
498 	  last_object = object;
499 	}
500     }
501 
502   if (last_object && INSN_P (last_object))
503     df_insn_rescan (as_a <rtx_insn *> (last_object));
504   num_changes = 0;
505 }
506 
507 /* Apply a group of changes previously issued with `validate_change'.
508    If all changes are valid, call confirm_change_group and return 1,
509    otherwise, call cancel_changes and return 0.  */
510 
511 int
512 apply_change_group (void)
513 {
514   if (verify_changes (0))
515     {
516       confirm_change_group ();
517       return 1;
518     }
519   else
520     {
521       cancel_changes (0);
522       return 0;
523     }
524 }
525 
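/* Illustrative sketch: when OBJECT is a MEM, validation goes through the
   memory-address check rather than insn recognition, so a single address
   change can be tried in isolation.  NEW_ADDR is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_change_mem_address (rtx mem, rtx new_addr)
{
  gcc_assert (MEM_P (mem));
  return validate_change (mem, &XEXP (mem, 0), new_addr, false);
}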
526 
527 /* Return the number of changes so far in the current group.  */
528 
529 int
530 num_validated_changes (void)
531 {
532   return num_changes;
533 }
534 
535 /* Retract the changes numbered NUM and up.  */
536 
537 void
538 cancel_changes (int num)
539 {
540   int i;
541 
542   /* Back out all the changes.  Do this in the reverse of the order
543      in which they were made.  */
544   for (i = num_changes - 1; i >= num; i--)
545     {
546       *changes[i].loc = changes[i].old;
547       if (changes[i].object && !MEM_P (changes[i].object))
548 	INSN_CODE (changes[i].object) = changes[i].old_code;
549     }
550   num_changes = num;
551 }
552 
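/* Illustrative sketch: num_validated_changes, verify_changes and
   cancel_changes combine into a checkpoint protocol (combine.c uses a
   similar idiom).  Changes queued before CHECKPOINT stay pending; only
   the speculative tail is retracted on failure.  */

static bool ATTRIBUTE_UNUSED
example_try_speculative_edit (rtx_insn *insn, rtx *loc, rtx new_rtx)
{
  int checkpoint = num_validated_changes ();
  validate_change (insn, loc, new_rtx, true);
  if (verify_changes (checkpoint))
    return true;	/* Caller must still confirm_change_group.  */
  cancel_changes (checkpoint);
  return false;
}
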
553 /* Reduce conditional compilation elsewhere.  */
554 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
555    rtx.  */
556 
557 static void
558 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
559                           machine_mode op0_mode)
560 {
561   rtx x = *loc;
562   enum rtx_code code = GET_CODE (x);
563   rtx new_rtx = NULL_RTX;
564   scalar_int_mode is_mode;
565 
566   if (SWAPPABLE_OPERANDS_P (x)
567       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
568     {
569       validate_unshare_change (object, loc,
570 			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
571 					       : swap_condition (code),
572 					       GET_MODE (x), XEXP (x, 1),
573 					       XEXP (x, 0)), 1);
574       x = *loc;
575       code = GET_CODE (x);
576     }
577 
578   /* Canonicalize arithmetic with all-constant operands.  */
579   switch (GET_RTX_CLASS (code))
580     {
581     case RTX_UNARY:
582       if (CONSTANT_P (XEXP (x, 0)))
583 	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
584 					    op0_mode);
585       break;
586     case RTX_COMM_ARITH:
587     case RTX_BIN_ARITH:
588       if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
589 	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
590 					     XEXP (x, 1));
591       break;
592     case RTX_COMPARE:
593     case RTX_COMM_COMPARE:
594       if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
595 	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
596 						 XEXP (x, 0), XEXP (x, 1));
597       break;
598     default:
599       break;
600     }
601   if (new_rtx)
602     {
603       validate_change (object, loc, new_rtx, 1);
604       return;
605     }
606 
607   switch (code)
608     {
609     case PLUS:
610       /* If we have a PLUS whose second operand is now a CONST_INT, use
611          simplify_gen_binary to try to simplify it.
612          ??? We may want to remove this later, once simplification is
613          separated from this function.  */
614       if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
615 	validate_change (object, loc,
616 			 simplify_gen_binary
617 			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
618       break;
619     case MINUS:
620       if (CONST_SCALAR_INT_P (XEXP (x, 1)))
621 	validate_change (object, loc,
622 			 simplify_gen_binary
623 			 (PLUS, GET_MODE (x), XEXP (x, 0),
624 			  simplify_gen_unary (NEG,
625 					      GET_MODE (x), XEXP (x, 1),
626 					      GET_MODE (x))), 1);
627       break;
628     case ZERO_EXTEND:
629     case SIGN_EXTEND:
630       if (GET_MODE (XEXP (x, 0)) == VOIDmode)
631 	{
632 	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
633 				    op0_mode);
634 	  /* If any of the above failed, substitute in something that
635 	     we know won't be recognized.  */
636 	  if (!new_rtx)
637 	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
638 	  validate_change (object, loc, new_rtx, 1);
639 	}
640       break;
641     case SUBREG:
642       /* All subregs that can be simplified should be simplified.  */
643       new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
644 			     SUBREG_BYTE (x));
645 
646       /* Subregs of VOIDmode operands are incorrect.  */
647       if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
648 	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
649       if (new_rtx)
650 	validate_change (object, loc, new_rtx, 1);
651       break;
652     case ZERO_EXTRACT:
653     case SIGN_EXTRACT:
654       /* If we are replacing a register with memory, try to change the memory
655          to be the mode required for memory in extract operations (this isn't
656          likely to be an insertion operation; if it were, nothing bad would
657          happen, we might just fail in some cases).  */
658 
659       if (MEM_P (XEXP (x, 0))
660 	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
661 	  && CONST_INT_P (XEXP (x, 1))
662 	  && CONST_INT_P (XEXP (x, 2))
663 	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
664 					MEM_ADDR_SPACE (XEXP (x, 0)))
665 	  && !MEM_VOLATILE_P (XEXP (x, 0)))
666 	{
667 	  int pos = INTVAL (XEXP (x, 2));
668 	  machine_mode new_mode = is_mode;
669 	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
670 	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
671 	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
672 	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
673 	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
674 					 ? word_mode
675 					 : as_a <scalar_int_mode> (new_mode));
676 
677 	  /* If we have a narrower mode, we can do something.  */
678 	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
679 	    {
680 	      int offset = pos / BITS_PER_UNIT;
681 	      rtx newmem;
682 
683 	      /* If the bytes and bits are counted differently, we
684 	         must adjust the offset.  */
685 	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
686 		offset =
687 		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
688 		   offset);
689 
690 	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
691 			  == GET_MODE_BITSIZE (wanted_mode));
692 	      pos %= GET_MODE_BITSIZE (wanted_mode);
693 
694 	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
695 
696 	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
697 	      validate_change (object, &XEXP (x, 0), newmem, 1);
698 	    }
699 	}
700 
701       break;
702 
703     default:
704       break;
705     }
706 }
707 
708 /* Replace every occurrence of FROM in X with TO.  Mark each change with
709    validate_change passing OBJECT.  */
710 
711 static void
712 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
713                         bool simplify)
714 {
715   int i, j;
716   const char *fmt;
717   rtx x = *loc;
718   enum rtx_code code;
719   machine_mode op0_mode = VOIDmode;
720   int prev_changes = num_changes;
721 
722   if (!x)
723     return;
724 
725   code = GET_CODE (x);
726   fmt = GET_RTX_FORMAT (code);
727   if (fmt[0] == 'e')
728     op0_mode = GET_MODE (XEXP (x, 0));
729 
730   /* X matches FROM if it is the same rtx or they are both referring to the
731      same register in the same mode.  Avoid calling rtx_equal_p unless the
732      operands look similar.  */
733 
734   if (x == from
735       || (REG_P (x) && REG_P (from)
736 	  && GET_MODE (x) == GET_MODE (from)
737 	  && REGNO (x) == REGNO (from))
738       || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
739 	  && rtx_equal_p (x, from)))
740     {
741       validate_unshare_change (object, loc, to, 1);
742       return;
743     }
744 
745   /* Call ourselves recursively to perform the replacements.
746      We must not replace inside an already-replaced expression; otherwise we
747      get infinite recursion for replacements like (reg X)->(subreg (reg X)),
748      so we must special-case shared ASM_OPERANDS.  */
749 
750   if (GET_CODE (x) == PARALLEL)
751     {
752       for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
753 	{
754 	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
755 	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
756 	    {
757 	      /* Verify that operands are really shared.  */
758 	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
759 			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
760 							      (x, 0, j))));
761 	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
762 				      from, to, object, simplify);
763 	    }
764 	  else
765 	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
766                                     simplify);
767 	}
768     }
769   else
770     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
771       {
772 	if (fmt[i] == 'e')
773 	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
774 	else if (fmt[i] == 'E')
775 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
776 	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
777                                     simplify);
778       }
779 
780   /* If we didn't substitute, there is nothing more to do.  */
781   if (num_changes == prev_changes)
782     return;
783 
784   /* ??? The regmove is no more, so is this aberration still necessary?  */
785   /* Allow the substituted expression to have a different mode.  This was
786      used by regmove to change the mode of a pseudo register.  */
787   if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
788     op0_mode = GET_MODE (XEXP (x, 0));
789 
790   /* Do changes needed to keep rtx consistent.  Don't do any other
791      simplifications, as it is not our job.  */
792   if (simplify)
793     simplify_while_replacing (loc, to, object, op0_mode);
794 }
795 
796 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
797    with TO.  After all changes have been made, validate by seeing
798    if INSN is still valid.  */
799 
800 int
801 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
802 {
803   validate_replace_rtx_1 (loc, from, to, insn, true);
804   return apply_change_group ();
805 }
806 
807 /* Try replacing every occurrence of FROM in INSN with TO.  After all
808    changes have been made, validate by seeing if INSN is still valid.  */
809 
810 int
811 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
812 {
813   validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
814   return apply_change_group ();
815 }
816 
817 /* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
818    is a part of INSN.  After all changes have been made, validate by seeing if
819    INSN is still valid.
820    validate_replace_rtx (from, to, insn) is equivalent to
821    validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */
822 
823 int
824 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
825 {
826   validate_replace_rtx_1 (where, from, to, insn, true);
827   return apply_change_group ();
828 }
829 
830 /* Same as above, but do not simplify rtx afterwards.  */
831 int
832 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
833 				      rtx_insn *insn)
834 {
835   validate_replace_rtx_1 (where, from, to, insn, false);
836   return apply_change_group ();
837 
838 }
839 
840 /* Try replacing every occurrence of FROM in INSN with TO.  This also
841    will replace in REG_EQUAL and REG_EQUIV notes.  */
842 
843 void
844 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
845 {
846   rtx note;
847   validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
848   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
849     if (REG_NOTE_KIND (note) == REG_EQUAL
850 	|| REG_NOTE_KIND (note) == REG_EQUIV)
851       validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
852 }
853 
854 /* Callback data for validate_replace_src_1, which note_uses calls to
    replace used subexpressions.  */
855 struct validate_replace_src_data
856 {
857   rtx from;			/* Old RTX */
858   rtx to;			/* New RTX */
859   rtx_insn *insn;			/* Insn in which substitution is occurring.  */
860 };
861 
862 static void
863 validate_replace_src_1 (rtx *x, void *data)
864 {
865   struct validate_replace_src_data *d
866     = (struct validate_replace_src_data *) data;
867 
868   validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
869 }
870 
871 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
872    SET_DESTs.  */
873 
874 void
875 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
876 {
877   struct validate_replace_src_data d;
878 
879   d.from = from;
880   d.to = to;
881   d.insn = insn;
882   note_uses (&PATTERN (insn), validate_replace_src_1, &d);
883 }
884 
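/* Illustrative sketch: unlike validate_replace_rtx, the routine above
   leaves SET_DESTs alone, so a register's uses can be renamed even in
   an insn that also sets it.  */

static bool ATTRIBUTE_UNUSED
example_rename_uses (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}
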
885 /* Try to simplify INSN.
886    Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
887    pattern and return true if something was simplified.  */
888 
889 bool
890 validate_simplify_insn (rtx_insn *insn)
891 {
892   int i;
893   rtx pat = NULL;
894   rtx newpat = NULL;
895 
896   pat = PATTERN (insn);
897 
898   if (GET_CODE (pat) == SET)
899     {
900       newpat = simplify_rtx (SET_SRC (pat));
901       if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
902 	validate_change (insn, &SET_SRC (pat), newpat, 1);
903       newpat = simplify_rtx (SET_DEST (pat));
904       if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
905 	validate_change (insn, &SET_DEST (pat), newpat, 1);
906     }
907   else if (GET_CODE (pat) == PARALLEL)
908     for (i = 0; i < XVECLEN (pat, 0); i++)
909       {
910 	rtx s = XVECEXP (pat, 0, i);
911 
912 	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
913 	  {
914 	    newpat = simplify_rtx (SET_SRC (s));
915 	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
916 	      validate_change (insn, &SET_SRC (s), newpat, 1);
917 	    newpat = simplify_rtx (SET_DEST (s));
918 	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
919 	      validate_change (insn, &SET_DEST (s), newpat, 1);
920 	  }
921       }
922   return ((num_changes_pending () > 0) && (apply_change_group () > 0));
923 }
924 
925 /* Return 1 if the insn using CC0 set by INSN does not contain
926    any ordered tests applied to the condition codes.
927    EQ and NE tests do not count.  */
928 
929 int
930 next_insn_tests_no_inequality (rtx_insn *insn)
931 {
932   rtx_insn *next = next_cc0_user (insn);
933 
934   /* If there is no next insn, we have to take the conservative choice.  */
935   if (next == 0)
936     return 0;
937 
938   return (INSN_P (next)
939 	  && ! inequality_comparisons_p (PATTERN (next)));
940 }
941 
942 /* Return 1 if OP is a valid general operand for machine mode MODE.
943    This is either a register reference, a memory reference,
944    or a constant.  In the case of a memory reference, the address
945    is checked for general validity for the target machine.
946 
947    Register and memory references must have mode MODE in order to be valid,
948    but some constants have no machine mode and are valid for any mode.
949 
950    If MODE is VOIDmode, OP is checked for validity for whatever mode
951    it has.
952 
953    The main use of this function is as a predicate in match_operand
954    expressions in the machine description.  */
955 
956 int
957 general_operand (rtx op, machine_mode mode)
958 {
959   enum rtx_code code = GET_CODE (op);
960 
961   if (mode == VOIDmode)
962     mode = GET_MODE (op);
963 
964   /* Don't accept CONST_INT or anything similar
965      if the caller wants something floating.  */
966   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
967       && GET_MODE_CLASS (mode) != MODE_INT
968       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
969     return 0;
970 
971   if (CONST_INT_P (op)
972       && mode != VOIDmode
973       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
974     return 0;
975 
976   if (CONSTANT_P (op))
977     return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
978 	     || mode == VOIDmode)
979 	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
980 	    && targetm.legitimate_constant_p (mode == VOIDmode
981 					      ? GET_MODE (op)
982 					      : mode, op));
983 
984   /* Except for certain constants with VOIDmode, already checked for,
985      OP's mode must match MODE if MODE specifies a mode.  */
986 
987   if (GET_MODE (op) != mode)
988     return 0;
989 
990   if (code == SUBREG)
991     {
992       rtx sub = SUBREG_REG (op);
993 
994 #ifdef INSN_SCHEDULING
995       /* On machines that have insn scheduling, we want all memory
996 	 references to be explicit, so outlaw paradoxical SUBREGs.
997 	 However, we must allow them after reload so that they can
998 	 get cleaned up by cleanup_subreg_operands.  */
999       if (!reload_completed && MEM_P (sub)
1000 	  && paradoxical_subreg_p (op))
1001 	return 0;
1002 #endif
1003       /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1004          may result in an incorrect reference.  We should simplify all valid
1005          subregs of MEM anyway.  But allow this after reload because we
1006 	 might be called from cleanup_subreg_operands.
1007 
1008 	 ??? This is a kludge.  */
1009       if (!reload_completed
1010 	  && maybe_ne (SUBREG_BYTE (op), 0)
1011 	  && MEM_P (sub))
1012 	return 0;
1013 
1014       if (REG_P (sub)
1015 	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
1016 	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1017 	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1018 	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1019 	  /* LRA can generate some invalid SUBREGS just for matched
1020 	     operand reload presentation.  LRA needs to treat them as
1021 	     valid.  */
1022 	  && ! LRA_SUBREG_P (op))
1023 	return 0;
1024 
1025       /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
1026 	 create such rtl, and we must reject it.  */
1027       if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1028 	  /* LRA can use subreg to store a floating point value in an
1029 	     integer mode.  Although the floating point and the
1030 	     integer modes need the same number of hard registers, the
1031 	     size of the floating point mode can be less than that of the
1032 	     integer mode.  */
1033 	  && ! lra_in_progress
1034 	  && paradoxical_subreg_p (op))
1035 	return 0;
1036 
1037       op = sub;
1038       code = GET_CODE (op);
1039     }
1040 
1041   if (code == REG)
1042     return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1043 	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1044 
1045   if (code == MEM)
1046     {
1047       rtx y = XEXP (op, 0);
1048 
1049       if (! volatile_ok && MEM_VOLATILE_P (op))
1050 	return 0;
1051 
1052       /* Use the mem's mode, since it will be reloaded thus.  LRA can
1053 	 generate move insns with invalid addresses, which are made valid
1054 	 and calculated efficiently by LRA through numerous further
1055 	 transformations.  */
1056       if (lra_in_progress
1057 	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1058 	return 1;
1059     }
1060 
1061   return 0;
1062 }
1063 
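/* Illustrative machine-description usage (a hypothetical pattern, not
   from any real target): predicates such as general_operand are normally
   referenced from match_operand expressions, e.g.

     (define_insn "*example_move_si"
       [(set (match_operand:SI 0 "register_operand" "=r")
	     (match_operand:SI 1 "general_operand" "rmi"))]
       ""
       "mov\t%0, %1")

   The generated recognizer then calls general_operand (operands[1],
   SImode) when matching operand 1.  */
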
1064 /* Return 1 if OP is a valid memory address for a memory reference
1065    of mode MODE.
1066 
1067    The main use of this function is as a predicate in match_operand
1068    expressions in the machine description.  */
1069 
1070 int
1071 address_operand (rtx op, machine_mode mode)
1072 {
1073   /* Wrong mode for an address expr.  */
1074   if (GET_MODE (op) != VOIDmode
1075       && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1076     return false;
1077 
1078   return memory_address_p (mode, op);
1079 }
1080 
1081 /* Return 1 if OP is a register reference of mode MODE.
1082    If MODE is VOIDmode, accept a register in any mode.
1083 
1084    The main use of this function is as a predicate in match_operand
1085    expressions in the machine description.  */
1086 
1087 int
1088 register_operand (rtx op, machine_mode mode)
1089 {
1090   if (GET_CODE (op) == SUBREG)
1091     {
1092       rtx sub = SUBREG_REG (op);
1093 
1094       /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1095 	 because it is guaranteed to be reloaded into one.
1096 	 Just make sure the MEM is valid in itself.
1097 	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1098 	 but currently it does result from (SUBREG (REG)...) where the
1099 	 reg went on the stack.)  */
1100       if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1101 	return 0;
1102     }
1103   else if (!REG_P (op))
1104     return 0;
1105   return general_operand (op, mode);
1106 }
1107 
1108 /* Return 1 for a register in Pmode; ignore the tested mode.  */
1109 
1110 int
1111 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1112 {
1113   return register_operand (op, Pmode);
1114 }
1115 
1116 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1117    or a hard register.  */
1118 
1119 int
1120 scratch_operand (rtx op, machine_mode mode)
1121 {
1122   if (GET_MODE (op) != mode && mode != VOIDmode)
1123     return 0;
1124 
1125   return (GET_CODE (op) == SCRATCH
1126 	  || (REG_P (op)
1127 	      && (lra_in_progress
1128 		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
1129 		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1130 }
1131 
1132 /* Return 1 if OP is a valid immediate operand for mode MODE.
1133 
1134    The main use of this function is as a predicate in match_operand
1135    expressions in the machine description.  */
1136 
1137 int
1138 immediate_operand (rtx op, machine_mode mode)
1139 {
1140   /* Don't accept CONST_INT or anything similar
1141      if the caller wants something floating.  */
1142   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1143       && GET_MODE_CLASS (mode) != MODE_INT
1144       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1145     return 0;
1146 
1147   if (CONST_INT_P (op)
1148       && mode != VOIDmode
1149       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1150     return 0;
1151 
1152   return (CONSTANT_P (op)
1153 	  && (GET_MODE (op) == mode || mode == VOIDmode
1154 	      || GET_MODE (op) == VOIDmode)
1155 	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1156 	  && targetm.legitimate_constant_p (mode == VOIDmode
1157 					    ? GET_MODE (op)
1158 					    : mode, op));
1159 }
1160 
1161 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */
1162 
1163 int
1164 const_int_operand (rtx op, machine_mode mode)
1165 {
1166   if (!CONST_INT_P (op))
1167     return 0;
1168 
1169   if (mode != VOIDmode
1170       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1171     return 0;
1172 
1173   return 1;
1174 }
1175 
1176 #if TARGET_SUPPORTS_WIDE_INT
1177 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1178    of mode MODE.  */
1179 int
1180 const_scalar_int_operand (rtx op, machine_mode mode)
1181 {
1182   if (!CONST_SCALAR_INT_P (op))
1183     return 0;
1184 
1185   if (CONST_INT_P (op))
1186     return const_int_operand (op, mode);
1187 
1188   if (mode != VOIDmode)
1189     {
1190       scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1191       int prec = GET_MODE_PRECISION (int_mode);
1192       int bitsize = GET_MODE_BITSIZE (int_mode);
1193 
1194       if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1195 	return 0;
1196 
1197       if (prec == bitsize)
1198 	return 1;
1199       else
1200 	{
1201 	  /* Multiword partial int.  */
1202 	  HOST_WIDE_INT x
1203 	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1204 	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1205 	}
1206     }
1207   return 1;
1208 }
1209 
1210 /* Returns 1 if OP is an operand that is a constant integer or constant
1211    floating-point number of MODE.  */
1212 
1213 int
1214 const_double_operand (rtx op, machine_mode mode)
1215 {
1216   return (CONST_DOUBLE_P (op)
1217 	  && (GET_MODE (op) == mode || mode == VOIDmode));
1218 }
1219 #else
1220 /* Returns 1 if OP is an operand that is a constant integer or constant
1221    floating-point number of MODE.  */
1222 
1223 int
1224 const_double_operand (rtx op, machine_mode mode)
1225 {
1226   /* Don't accept CONST_INT or anything similar
1227      if the caller wants something floating.  */
1228   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1229       && GET_MODE_CLASS (mode) != MODE_INT
1230       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1231     return 0;
1232 
1233   return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1234 	  && (mode == VOIDmode || GET_MODE (op) == mode
1235 	      || GET_MODE (op) == VOIDmode));
1236 }
1237 #endif
1238 /* Return 1 if OP is a general operand that is not an immediate
1239    operand of mode MODE.  */
1240 
1241 int
1242 nonimmediate_operand (rtx op, machine_mode mode)
1243 {
1244   return (general_operand (op, mode) && ! CONSTANT_P (op));
1245 }
1246 
1247 /* Return 1 if OP is a register reference or immediate value of mode MODE.  */
1248 
1249 int
1250 nonmemory_operand (rtx op, machine_mode mode)
1251 {
1252   if (CONSTANT_P (op))
1253     return immediate_operand (op, mode);
1254   return register_operand (op, mode);
1255 }
1256 
1257 /* Return 1 if OP is a valid operand that stands for pushing a
1258    value of mode MODE onto the stack.
1259 
1260    The main use of this function is as a predicate in match_operand
1261    expressions in the machine description.  */
1262 
1263 int
1264 push_operand (rtx op, machine_mode mode)
1265 {
1266   if (!MEM_P (op))
1267     return 0;
1268 
1269   if (mode != VOIDmode && GET_MODE (op) != mode)
1270     return 0;
1271 
1272   poly_int64 rounded_size = GET_MODE_SIZE (mode);
1273 
1274 #ifdef PUSH_ROUNDING
1275   rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
1276 #endif
1277 
1278   op = XEXP (op, 0);
1279 
1280   if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
1281     {
1282       if (GET_CODE (op) != STACK_PUSH_CODE)
1283 	return 0;
1284     }
1285   else
1286     {
1287       poly_int64 offset;
1288       if (GET_CODE (op) != PRE_MODIFY
1289 	  || GET_CODE (XEXP (op, 1)) != PLUS
1290 	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1291 	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
1292 	  || (STACK_GROWS_DOWNWARD
1293 	      ? maybe_ne (offset, -rounded_size)
1294 	      : maybe_ne (offset, rounded_size)))
1295 	return 0;
1296     }
1297 
1298   return XEXP (op, 0) == stack_pointer_rtx;
1299 }
1300 
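/* Illustrative RTL shapes (assuming STACK_GROWS_DOWNWARD and
   STACK_PUSH_CODE == PRE_DEC): push_operand would accept

     (mem:SI (pre_dec:P (reg sp)))

   for a push of exactly GET_MODE_SIZE (SImode) bytes, and

     (mem:QI (pre_modify:P (reg sp) (plus:P (reg sp) (const_int -4))))

   on a hypothetical target whose PUSH_ROUNDING rounds a 1-byte push
   up to 4 bytes.  */
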
1301 /* Return 1 if OP is a valid operand that stands for popping a
1302    value of mode MODE off the stack.
1303 
1304    The main use of this function is as a predicate in match_operand
1305    expressions in the machine description.  */
1306 
1307 int
1308 pop_operand (rtx op, machine_mode mode)
1309 {
1310   if (!MEM_P (op))
1311     return 0;
1312 
1313   if (mode != VOIDmode && GET_MODE (op) != mode)
1314     return 0;
1315 
1316   op = XEXP (op, 0);
1317 
1318   if (GET_CODE (op) != STACK_POP_CODE)
1319     return 0;
1320 
1321   return XEXP (op, 0) == stack_pointer_rtx;
1322 }
1323 
1324 /* Return 1 if ADDR is a valid memory address
1325    for mode MODE in address space AS.  */
1326 
1327 int
1328 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1329 			     rtx addr, addr_space_t as)
1330 {
1331 #ifdef GO_IF_LEGITIMATE_ADDRESS
1332   gcc_assert (ADDR_SPACE_GENERIC_P (as));
1333   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1334   return 0;
1335 
1336  win:
1337   return 1;
1338 #else
1339   return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1340 #endif
1341 }
1342 
1343 /* Return 1 if OP is a valid memory reference with mode MODE,
1344    including a valid address.
1345 
1346    The main use of this function is as a predicate in match_operand
1347    expressions in the machine description.  */
1348 
1349 int
1350 memory_operand (rtx op, machine_mode mode)
1351 {
1352   rtx inner;
1353 
1354   if (! reload_completed)
1355     /* Note that no SUBREG is a memory operand before the end of the reload
1356        pass, because (SUBREG (MEM...)) forces reloading into a register.  */
1357     return MEM_P (op) && general_operand (op, mode);
1358 
1359   if (mode != VOIDmode && GET_MODE (op) != mode)
1360     return 0;
1361 
1362   inner = op;
1363   if (GET_CODE (inner) == SUBREG)
1364     inner = SUBREG_REG (inner);
1365 
1366   return (MEM_P (inner) && general_operand (op, mode));
1367 }
1368 
1369 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1370    that is, a memory reference whose address is a general_operand.  */
1371 
1372 int
1373 indirect_operand (rtx op, machine_mode mode)
1374 {
1375   /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
1376   if (! reload_completed
1377       && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1378     {
1379       if (mode != VOIDmode && GET_MODE (op) != mode)
1380 	return 0;
1381 
1382       /* The only way that we can have a general_operand as the resulting
1383 	 address is if OFFSET is zero and the address already is an operand
1384 	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1385 	 operand.  */
1386       poly_int64 offset;
1387       rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1388       return (known_eq (offset + SUBREG_BYTE (op), 0)
1389 	      && general_operand (addr, Pmode));
1390     }
1391 
1392   return (MEM_P (op)
1393 	  && memory_operand (op, mode)
1394 	  && general_operand (XEXP (op, 0), Pmode));
1395 }
1396 
1397 /* Return 1 if this is an ordered comparison operator (not including
1398    ORDERED and UNORDERED).  */
1399 
1400 int
1401 ordered_comparison_operator (rtx op, machine_mode mode)
1402 {
1403   if (mode != VOIDmode && GET_MODE (op) != mode)
1404     return false;
1405   switch (GET_CODE (op))
1406     {
1407     case EQ:
1408     case NE:
1409     case LT:
1410     case LTU:
1411     case LE:
1412     case LEU:
1413     case GT:
1414     case GTU:
1415     case GE:
1416     case GEU:
1417       return true;
1418     default:
1419       return false;
1420     }
1421 }
1422 
1423 /* Return 1 if this is a comparison operator.  This allows the use of
1424    MATCH_OPERATOR to recognize all the branch insns.  */
1425 
1426 int
1427 comparison_operator (rtx op, machine_mode mode)
1428 {
1429   return ((mode == VOIDmode || GET_MODE (op) == mode)
1430 	  && COMPARISON_P (op));
1431 }
1432 
1433 /* If BODY is an insn body that uses ASM_OPERANDS, return it.  */
1434 
1435 rtx
1436 extract_asm_operands (rtx body)
1437 {
1438   rtx tmp;
1439   switch (GET_CODE (body))
1440     {
1441     case ASM_OPERANDS:
1442       return body;
1443 
1444     case SET:
1445       /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
1446       tmp = SET_SRC (body);
1447       if (GET_CODE (tmp) == ASM_OPERANDS)
1448 	return tmp;
1449       break;
1450 
1451     case PARALLEL:
1452       tmp = XVECEXP (body, 0, 0);
1453       if (GET_CODE (tmp) == ASM_OPERANDS)
1454 	return tmp;
1455       if (GET_CODE (tmp) == SET)
1456 	{
1457 	  tmp = SET_SRC (tmp);
1458 	  if (GET_CODE (tmp) == ASM_OPERANDS)
1459 	    return tmp;
1460 	}
1461       break;
1462 
1463     default:
1464       break;
1465     }
1466   return NULL;
1467 }
1468 
1469 /* If BODY is an insn body that uses ASM_OPERANDS,
1470    return the number of operands (both input and output) in the insn.
1471    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1472    return 0.
1473    Otherwise return -1.  */
1474 
1475 int
1476 asm_noperands (const_rtx body)
1477 {
1478   rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1479   int i, n_sets = 0;
1480 
1481   if (asm_op == NULL)
1482     {
1483       if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1484 	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1485 	{
1486 	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
1487 	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1488 	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1489 	      return -1;
1490 	  return 0;
1491 	}
1492       return -1;
1493     }
1494 
1495   if (GET_CODE (body) == SET)
1496     n_sets = 1;
1497   else if (GET_CODE (body) == PARALLEL)
1498     {
1499       if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1500 	{
1501 	  /* Multiple output operands, or 1 output plus some clobbers:
1502 	     body is
1503 	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
1504 	  /* Count backwards through CLOBBERs to determine number of SETs.  */
1505 	  for (i = XVECLEN (body, 0); i > 0; i--)
1506 	    {
1507 	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1508 		break;
1509 	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1510 		return -1;
1511 	    }
1512 
1513 	  /* N_SETS is now number of output operands.  */
1514 	  n_sets = i;
1515 
1516 	  /* Verify that all the SETs we have
1517 	     came from a single original asm_operands insn
1518 	     (so that invalid combinations are blocked).  */
1519 	  for (i = 0; i < n_sets; i++)
1520 	    {
1521 	      rtx elt = XVECEXP (body, 0, i);
1522 	      if (GET_CODE (elt) != SET)
1523 		return -1;
1524 	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1525 		return -1;
1526 	      /* If these ASM_OPERANDS rtx's came from different original insns
1527 	         then they aren't allowed together.  */
1528 	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1529 		  != ASM_OPERANDS_INPUT_VEC (asm_op))
1530 		return -1;
1531 	    }
1532 	}
1533       else
1534 	{
1535 	  /* 0 outputs, but some clobbers:
1536 	     body is [(asm_operands ...) (clobber (reg ...))...].  */
1537 	  /* Make sure all the other parallel things really are clobbers.  */
1538 	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1539 	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1540 	      return -1;
1541 	}
1542     }
1543 
1544   return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1545 	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1546 }
1547 
1548 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1549    copy its operands (both input and output) into the vector OPERANDS,
1550    the locations of the operands within the insn into the vector OPERAND_LOCS,
1551    and the constraints for the operands into CONSTRAINTS.
1552    Write the modes of the operands into MODES.
1553    Write the location info into LOC.
1554    Return the assembler-template.
1555    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1556    return the basic assembly string.
1557 
1558    If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1559    we don't store that info.  */
1560 
1561 const char *
1562 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1563 		     const char **constraints, machine_mode *modes,
1564 		     location_t *loc)
1565 {
1566   int nbase = 0, n, i;
1567   rtx asmop;
1568 
1569   switch (GET_CODE (body))
1570     {
1571     case ASM_OPERANDS:
1572       /* Zero output asm: BODY is (asm_operands ...).  */
1573       asmop = body;
1574       break;
1575 
1576     case SET:
1577       /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
1578       asmop = SET_SRC (body);
1579 
1580       /* The output is in the SET.
1581 	 Its constraint is in the ASM_OPERANDS itself.  */
1582       if (operands)
1583 	operands[0] = SET_DEST (body);
1584       if (operand_locs)
1585 	operand_locs[0] = &SET_DEST (body);
1586       if (constraints)
1587 	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1588       if (modes)
1589 	modes[0] = GET_MODE (SET_DEST (body));
1590       nbase = 1;
1591       break;
1592 
1593     case PARALLEL:
1594       {
1595 	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
1596 
1597 	asmop = XVECEXP (body, 0, 0);
1598 	if (GET_CODE (asmop) == SET)
1599 	  {
1600 	    asmop = SET_SRC (asmop);
1601 
1602 	    /* At least one output, plus some CLOBBERs.  The outputs are in
1603 	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
1604 	    for (i = 0; i < nparallel; i++)
1605 	      {
1606 		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1607 		  break;		/* Past last SET */
1608 		gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
1609 		if (operands)
1610 		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
1611 		if (operand_locs)
1612 		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1613 		if (constraints)
1614 		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1615 		if (modes)
1616 		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1617 	      }
1618 	    nbase = i;
1619 	  }
1620 	else if (GET_CODE (asmop) == ASM_INPUT)
1621 	  {
1622 	    if (loc)
1623 	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
1624 	    return XSTR (asmop, 0);
1625 	  }
1626 	break;
1627       }
1628 
1629     default:
1630       gcc_unreachable ();
1631     }
1632 
1633   n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1634   for (i = 0; i < n; i++)
1635     {
1636       if (operand_locs)
1637 	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1638       if (operands)
1639 	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1640       if (constraints)
1641 	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1642       if (modes)
1643 	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1644     }
1645   nbase += n;
1646 
1647   n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1648   for (i = 0; i < n; i++)
1649     {
1650       if (operand_locs)
1651 	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1652       if (operands)
1653 	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1654       if (constraints)
1655 	constraints[nbase + i] = "";
1656       if (modes)
1657 	modes[nbase + i] = Pmode;
1658     }
1659 
1660   if (loc)
1661     *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1662 
1663   return ASM_OPERANDS_TEMPLATE (asmop);
1664 }
1665 
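/* Illustrative sketch: the usual calling convention for the routines
   above.  asm_noperands sizes the arrays, decode_asm_operands fills
   them (any out-parameter may be NULL, as in check_asm_operands
   earlier).  Here outputs are counted by their '=' or '+' constraint
   prefix.  */

static int ATTRIBUTE_UNUSED
example_count_asm_outputs (rtx body)
{
  int noperands = asm_noperands (body);
  if (noperands <= 0)
    return 0;

  rtx *operands = XALLOCAVEC (rtx, noperands);
  const char **constraints = XALLOCAVEC (const char *, noperands);
  decode_asm_operands (body, operands, NULL, constraints, NULL, NULL);

  int noutputs = 0;
  for (int i = 0; i < noperands; i++)
    if (constraints[i][0] == '=' || constraints[i][0] == '+')
      noutputs++;
  return noutputs;
}
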
1666 /* Parse inline assembly string STRING and determine which operands are
1667    referenced by % markers.  For the first NOPERANDS operands, set USED[I]
1668    to true if operand I is referenced.
1669 
1670    This is intended to distinguish barrier-like asms such as:
1671 
1672       asm ("" : "=m" (...));
1673 
1674    from real references such as:
1675 
1676       asm ("sw\t$0, %0" : "=m" (...));  */
1677 
1678 void
1679 get_referenced_operands (const char *string, bool *used,
1680 			 unsigned int noperands)
1681 {
1682   memset (used, 0, sizeof (bool) * noperands);
1683   const char *p = string;
1684   while (*p)
1685     switch (*p)
1686       {
1687       case '%':
1688 	p += 1;
1689 	/* A letter followed by a digit is a modifier plus an operand
	   number (e.g. %w0); skip the modifier letter.  */
1690 	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1691 	  p += 1;
1692 	if (ISDIGIT (*p))
1693 	  {
1694 	    char *endptr;
1695 	    unsigned long opnum = strtoul (p, &endptr, 10);
1696 	    if (endptr != p && opnum < noperands)
1697 	      used[opnum] = true;
1698 	    p = endptr;
1699 	  }
1700 	else
1701 	  p += 1;
1702 	break;
1703 
1704       default:
1705 	p++;
1706 	break;
1707       }
1708 }
1709 
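/* Illustrative sketch: combining decode_asm_operands with
   get_referenced_operands to spot a barrier-like asm whose template
   mentions none of its operands.  */

static bool ATTRIBUTE_UNUSED
example_asm_mentions_no_operand (rtx body)
{
  int noperands = asm_noperands (body);
  if (noperands <= 0)
    return noperands == 0;

  const char *templ
    = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
  bool *used = XALLOCAVEC (bool, noperands);
  get_referenced_operands (templ, used, noperands);

  for (int i = 0; i < noperands; i++)
    if (used[i])
      return false;
  return true;
}
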
1710 /* Check if an asm_operand matches its constraints.
1711    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
1712 
1713 int
1714 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1715 {
1716   int result = 0;
1717   bool incdec_ok = false;
1718 
1719   /* Use constrain_operands after reload.  */
1720   gcc_assert (!reload_completed);
1721 
1722   /* An empty constraint string is the same as "X,...,X", i.e. X for as
1723      many alternatives as required to match the other operands.  */
1724   if (*constraint == '\0')
1725     result = 1;
1726 
1727   while (*constraint)
1728     {
1729       enum constraint_num cn;
1730       char c = *constraint;
1731       int len;
1732       switch (c)
1733 	{
1734 	case ',':
1735 	  constraint++;
1736 	  continue;
1737 
1738 	case '0': case '1': case '2': case '3': case '4':
1739 	case '5': case '6': case '7': case '8': case '9':
1740 	  /* If the caller provided a constraints pointer, look up
1741 	     the matching constraint.  Otherwise, our caller should have
1742 	     given us the proper matching constraint, but we can't
1743 	     actually fail the check if they didn't.  Indicate that
1744 	     results are inconclusive.  */
1745 	  if (constraints)
1746 	    {
1747 	      char *end;
1748 	      unsigned long match;
1749 
1750 	      match = strtoul (constraint, &end, 10);
1751 	      if (!result)
1752 		result = asm_operand_ok (op, constraints[match], NULL);
1753 	      constraint = (const char *) end;
1754 	    }
1755 	  else
1756 	    {
1757 	      do
1758 		constraint++;
1759 	      while (ISDIGIT (*constraint));
1760 	      if (! result)
1761 		result = -1;
1762 	    }
1763 	  continue;
1764 
1765 	  /* The rest of the compiler assumes that reloading the address
1766 	     of a MEM into a register will make it fit an 'o' constraint.
1767 	     That is, if it sees a MEM operand for an 'o' constraint,
1768 	     it assumes that (mem (base-reg)) will fit.
1769 
1770 	     That assumption fails on targets that don't have offsettable
1771 	     addresses at all.  We therefore need to treat 'o' asm
1772 	     constraints as a special case and only accept operands that
1773 	     are already offsettable, thus proving that at least one
1774 	     offsettable address exists.  */
1775 	case 'o': /* offsettable */
1776 	  if (offsettable_nonstrict_memref_p (op))
1777 	    result = 1;
1778 	  break;
1779 
1780 	case 'g':
1781 	  if (general_operand (op, VOIDmode))
1782 	    result = 1;
1783 	  break;
1784 
1785 	case '<':
1786 	case '>':
1787 	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1788 	     to exist, excepting those that expand_call created.  Further,
1789 	     on some machines which do not have generalized auto inc/dec,
1790 	     an inc/dec is not a memory_operand.
1791 
1792 	     Match any memory and hope things are resolved after reload.  */
1793 	  incdec_ok = true;
1794 	  /* FALLTHRU */
1795 	default:
1796 	  cn = lookup_constraint (constraint);
1797 	  switch (get_constraint_type (cn))
1798 	    {
1799 	    case CT_REGISTER:
1800 	      if (!result
1801 		  && reg_class_for_constraint (cn) != NO_REGS
1802 		  && GET_MODE (op) != BLKmode
1803 		  && register_operand (op, VOIDmode))
1804 		result = 1;
1805 	      break;
1806 
1807 	    case CT_CONST_INT:
1808 	      if (!result
1809 		  && CONST_INT_P (op)
1810 		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1811 		result = 1;
1812 	      break;
1813 
1814 	    case CT_MEMORY:
1815 	    case CT_SPECIAL_MEMORY:
1816 	      /* Every memory operand can be reloaded to fit.  */
1817 	      result = result || memory_operand (op, VOIDmode);
1818 	      break;
1819 
1820 	    case CT_ADDRESS:
1821 	      /* Every address operand can be reloaded to fit.  */
1822 	      result = result || address_operand (op, VOIDmode);
1823 	      break;
1824 
1825 	    case CT_FIXED_FORM:
1826 	      result = result || constraint_satisfied_p (op, cn);
1827 	      break;
1828 	    }
1829 	  break;
1830 	}
1831       len = CONSTRAINT_LEN (c, constraint);
1832       do
1833 	constraint++;
1834       while (--len && *constraint && *constraint != ',');
1835       if (len)
1836 	return 0;
1837     }
1838 
1839   /* For operands without < or > constraints, reject side-effects.  */
1840   if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
1841     switch (GET_CODE (XEXP (op, 0)))
1842       {
1843       case PRE_INC:
1844       case POST_INC:
1845       case PRE_DEC:
1846       case POST_DEC:
1847       case PRE_MODIFY:
1848       case POST_MODIFY:
1849 	return 0;
1850       default:
1851 	break;
1852       }
1853 
1854   return result;
1855 }
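
/* Example (editorial sketch): before reload, a pseudo register
   satisfies the "r" constraint and any MEM satisfies "m":

     rtx reg = gen_reg_rtx (SImode);
     if (asm_operand_ok (reg, "r", NULL) > 0)
       ...the operand is acceptable...

   With a matching constraint such as "0" and no CONSTRAINTS array
   the result is -1 (inconclusive), as described above.  */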
1856 
1857 /* Given an rtx *P, if it is a sum containing an integer constant term,
1858    return the location (type rtx *) of the pointer to that constant term.
1859    Otherwise, return a null pointer.  */
1860 
1861 rtx *
1862 find_constant_term_loc (rtx *p)
1863 {
1864   rtx *tem;
1865   enum rtx_code code = GET_CODE (*p);
1866 
1867   /* If *P IS such a constant term, P is its location.  */
1868 
1869   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1870       || code == CONST)
1871     return p;
1872 
1873   /* Otherwise, if not a sum, it has no constant term.  */
1874 
1875   if (GET_CODE (*p) != PLUS)
1876     return 0;
1877 
1878   /* If both summands are constant, the whole sum is the constant term.  */
1879 
1880   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1881       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1882     return p;
1883 
1884   /* Otherwise, check each summand for containing a constant term.  */
1885 
1886   if (XEXP (*p, 0) != 0)
1887     {
1888       tem = find_constant_term_loc (&XEXP (*p, 0));
1889       if (tem != 0)
1890 	return tem;
1891     }
1892 
1893   if (XEXP (*p, 1) != 0)
1894     {
1895       tem = find_constant_term_loc (&XEXP (*p, 1));
1896       if (tem != 0)
1897 	return tem;
1898     }
1899 
1900   return 0;
1901 }
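
/* For example (editorial): given (plus:SI (reg:SI 100) (const_int 4)),
   the recursion above finds the location of the (const_int 4):

     rtx addr = gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 100),
			      GEN_INT (4));
     rtx *loc = find_constant_term_loc (&addr);

   Here LOC is &XEXP (addr, 1), the location of the constant term.  */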
1902 
1903 /* Return 1 if OP is a memory reference
1904    whose address contains no side effects
1905    and remains valid after the addition
1906    of a positive integer less than the
1907    size of the object being referenced.
1908 
1909    We assume that the original address is valid and do not check it.
1910 
1911    This uses strict_memory_address_p as a subroutine, so
1912    don't use it before reload.  */
1913 
1914 int
1915 offsettable_memref_p (rtx op)
1916 {
1917   return ((MEM_P (op))
1918 	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1919 					       MEM_ADDR_SPACE (op)));
1920 }
1921 
1922 /* Similar, but don't require a strictly valid mem ref:
1923    consider pseudo-regs valid as index or base regs.  */
1924 
1925 int
1926 offsettable_nonstrict_memref_p (rtx op)
1927 {
1928   return ((MEM_P (op))
1929 	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1930 					       MEM_ADDR_SPACE (op)));
1931 }
1932 
1933 /* Return 1 if Y is a memory address which contains no side effects
1934    and would remain valid for address space AS after the addition of
1935    a positive integer less than the size of MODE.
1936 
1937    We assume that the original address is valid and do not check it.
1938    We do check that it is valid for narrower modes.
1939 
1940    If STRICTP is nonzero, we require a strictly valid address,
1941    for the sake of use in reload.c.  */
1942 
1943 int
1944 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1945 				  addr_space_t as)
1946 {
1947   enum rtx_code ycode = GET_CODE (y);
1948   rtx z;
1949   rtx y1 = y;
1950   rtx *y2;
1951   int (*addressp) (machine_mode, rtx, addr_space_t) =
1952     (strictp ? strict_memory_address_addr_space_p
1953 	     : memory_address_addr_space_p);
1954   poly_int64 mode_sz = GET_MODE_SIZE (mode);
1955 
1956   if (CONSTANT_ADDRESS_P (y))
1957     return 1;
1958 
1959   /* Adjusting an offsettable address involves changing to a narrower mode.
1960      Make sure that's OK.  */
1961 
1962   if (mode_dependent_address_p (y, as))
1963     return 0;
1964 
1965   machine_mode address_mode = GET_MODE (y);
1966   if (address_mode == VOIDmode)
1967     address_mode = targetm.addr_space.address_mode (as);
1968 #ifdef POINTERS_EXTEND_UNSIGNED
1969   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1970 #endif
1971 
1972   /* ??? How much offset does an offsettable BLKmode reference need?
1973      Clearly that depends on the situation in which it's being used.
1974      However, the current situation in which we test 0xffffffff is
1975      less than ideal.  Caveat user.  */
1976   if (known_eq (mode_sz, 0))
1977     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1978 
1979   /* If the expression contains a constant term,
1980      see if it remains valid when max possible offset is added.  */
1981 
1982   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1983     {
1984       int good;
1985 
1986       y1 = *y2;
1987       *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1988       /* Use QImode because an odd displacement may be automatically invalid
1989 	 for any wider mode.  But it should be valid for a single byte.  */
1990       good = (*addressp) (QImode, y, as);
1991 
1992       /* In any case, restore old contents of memory.  */
1993       *y2 = y1;
1994       return good;
1995     }
1996 
1997   if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1998     return 0;
1999 
2000   /* The offset added here is chosen as the maximum offset that
2001      any instruction could need to add when operating on something
2002      of the specified mode.  We assume that if Y and Y+c are
2003      valid addresses then so is Y+d for all 0<d<c.  adjust_address will
2004      go inside a LO_SUM here, so we do so as well.  */
2005   if (GET_CODE (y) == LO_SUM
2006       && mode != BLKmode
2007       && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2008     z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2009 			plus_constant (address_mode, XEXP (y, 1),
2010 				       mode_sz - 1));
2011 #ifdef POINTERS_EXTEND_UNSIGNED
2012   /* Likewise for a ZERO_EXTEND from pointer_mode.  */
2013   else if (POINTERS_EXTEND_UNSIGNED > 0
2014 	   && GET_CODE (y) == ZERO_EXTEND
2015 	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
2016     z = gen_rtx_ZERO_EXTEND (address_mode,
2017 			     plus_constant (pointer_mode, XEXP (y, 0),
2018 					    mode_sz - 1));
2019 #endif
2020   else
2021     z = plus_constant (address_mode, y, mode_sz - 1);
2022 
2023   /* Use QImode because an odd displacement may be automatically invalid
2024      for any wider mode.  But it should be valid for a single byte.  */
2025   return (*addressp) (QImode, z, as);
2026 }
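
/* Worked example (editorial): with MODE == SImode (mode_sz == 4) and
   Y == (plus (reg) (const_int 8)), the code above rewrites the
   constant term to (const_int 11) -- the original displacement plus
   mode_sz - 1 -- and asks whether the result is still a valid QImode
   address.  If it is, every byte of the 4-byte reference is
   addressable.  */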
2027 
2028 /* Return 1 if ADDR is an address-expression whose effect depends
2029    on the mode of the memory reference it is used in.
2030 
2031    ADDRSPACE is the address space associated with the address.
2032 
2033    Autoincrement addressing is a typical example of mode-dependence
2034    because the amount of the increment depends on the mode.  */
2035 
2036 bool
2037 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2038 {
2039   /* Auto-increment addressing with anything other than post_modify
2040      or pre_modify always introduces a mode dependency.  Catch such
2041      cases now instead of deferring to the target.  */
2042   if (GET_CODE (addr) == PRE_INC
2043       || GET_CODE (addr) == POST_INC
2044       || GET_CODE (addr) == PRE_DEC
2045       || GET_CODE (addr) == POST_DEC)
2046     return true;
2047 
2048   return targetm.mode_dependent_address_p (addr, addrspace);
2049 }
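
/* For instance, (post_inc (reg)) is always rejected here: the amount
   by which the register is incremented equals the size of the accessed
   mode, so the same address expression means different things in
   QImode and SImode.  PRE_MODIFY and POST_MODIFY carry an explicit
   displacement and are therefore left to the target hook to judge.  */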
2050 
2051 /* Return true if boolean attribute ATTR is supported.  */
2052 
2053 static bool
2054 have_bool_attr (bool_attr attr)
2055 {
2056   switch (attr)
2057     {
2058     case BA_ENABLED:
2059       return HAVE_ATTR_enabled;
2060     case BA_PREFERRED_FOR_SIZE:
2061       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2062     case BA_PREFERRED_FOR_SPEED:
2063       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2064     }
2065   gcc_unreachable ();
2066 }
2067 
2068 /* Return the value of ATTR for instruction INSN.  */
2069 
2070 static bool
2071 get_bool_attr (rtx_insn *insn, bool_attr attr)
2072 {
2073   switch (attr)
2074     {
2075     case BA_ENABLED:
2076       return get_attr_enabled (insn);
2077     case BA_PREFERRED_FOR_SIZE:
2078       return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2079     case BA_PREFERRED_FOR_SPEED:
2080       return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2081     }
2082   gcc_unreachable ();
2083 }
2084 
2085 /* Like get_bool_attr_mask, but don't use the cache.  */
2086 
2087 static alternative_mask
2088 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2089 {
2090   /* Temporarily install enough information for get_attr_<foo> to assume
2091      that the insn operands are already cached.  As above, the attribute
2092      mustn't depend on the values of operands, so we don't provide their
2093      real values here.  */
2094   rtx_insn *old_insn = recog_data.insn;
2095   int old_alternative = which_alternative;
2096 
2097   recog_data.insn = insn;
2098   alternative_mask mask = ALL_ALTERNATIVES;
2099   int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2100   for (int i = 0; i < n_alternatives; i++)
2101     {
2102       which_alternative = i;
2103       if (!get_bool_attr (insn, attr))
2104 	mask &= ~ALTERNATIVE_BIT (i);
2105     }
2106 
2107   recog_data.insn = old_insn;
2108   which_alternative = old_alternative;
2109   return mask;
2110 }
2111 
2112 /* Return the mask of operand alternatives that are allowed for INSN
2113    by boolean attribute ATTR.  This mask depends only on INSN and on
2114    the current target; it does not depend on things like the values of
2115    operands.  */
2116 
2117 static alternative_mask
2118 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2119 {
2120   /* Quick exit for asms and for targets that don't use these attributes.  */
2121   int code = INSN_CODE (insn);
2122   if (code < 0 || !have_bool_attr (attr))
2123     return ALL_ALTERNATIVES;
2124 
2125   /* Calling get_attr_<foo> can be expensive, so cache the mask
2126      for speed.  */
2127   if (!this_target_recog->x_bool_attr_masks[code][attr])
2128     this_target_recog->x_bool_attr_masks[code][attr]
2129       = get_bool_attr_mask_uncached (insn, attr);
2130   return this_target_recog->x_bool_attr_masks[code][attr];
2131 }
2132 
2133 /* Return the set of alternatives of INSN that are allowed by the current
2134    target.  */
2135 
2136 alternative_mask
2137 get_enabled_alternatives (rtx_insn *insn)
2138 {
2139   return get_bool_attr_mask (insn, BA_ENABLED);
2140 }
2141 
2142 /* Return the set of alternatives of INSN that are allowed by the current
2143    target and are preferred for the current size/speed optimization
2144    choice.  */
2145 
2146 alternative_mask
2147 get_preferred_alternatives (rtx_insn *insn)
2148 {
2149   if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2150     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2151   else
2152     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2153 }
2154 
2155 /* Return the set of alternatives of INSN that are allowed by the current
2156    target and are preferred for the size/speed optimization choice
2157    associated with BB.  Passing a separate BB is useful if INSN has not
2158    been emitted yet or if we are considering moving it to a different
2159    block.  */
2160 
2161 alternative_mask
2162 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2163 {
2164   if (optimize_bb_for_speed_p (bb))
2165     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2166   else
2167     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2168 }
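
/* Usage sketch (editorial; TEST_BIT is the same macro that
   constrain_operands below applies to these masks): a pass can check
   whether alternative A of INSN is usable under the current size/speed
   choice with

     alternative_mask prefer = get_preferred_alternatives (insn);
     if (TEST_BIT (prefer, a))
       ...alternative A is enabled and preferred...  */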
2169 
2170 /* Assert that the cached boolean attributes for INSN are still accurate.
2171    The backend is required to define these attributes in a way that only
2172    depends on the current target (rather than operands, compiler phase,
2173    etc.).  */
2174 
2175 bool
2176 check_bool_attrs (rtx_insn *insn)
2177 {
2178   int code = INSN_CODE (insn);
2179   if (code >= 0)
2180     for (int i = 0; i <= BA_LAST; ++i)
2181       {
2182 	enum bool_attr attr = (enum bool_attr) i;
2183 	if (this_target_recog->x_bool_attr_masks[code][attr])
2184 	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2185 		      == get_bool_attr_mask_uncached (insn, attr));
2186       }
2187   return true;
2188 }
2189 
2190 /* Like extract_insn, but save the extracted insn and don't extract again
2191    when called again for the same insn, expecting that recog_data still
2192    contains valid information.  This is used primarily by the gen_attr
2193    infrastructure, which often extracts the same insn again and again.  */
2194 void
2195 extract_insn_cached (rtx_insn *insn)
2196 {
2197   if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2198     return;
2199   extract_insn (insn);
2200   recog_data.insn = insn;
2201 }
2202 
2203 /* Do uncached extract_insn, constrain_operands and complain about failures.
2204    This should be used when extracting a pre-existing constrained instruction
2205    if the caller wants to know which alternative was chosen.  */
2206 void
2207 extract_constrain_insn (rtx_insn *insn)
2208 {
2209   extract_insn (insn);
2210   if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2211     fatal_insn_not_found (insn);
2212 }
2213 
2214 /* Do cached extract_insn, constrain_operands and complain about failures.
2215    Used by insn_attrtab.  */
2216 void
2217 extract_constrain_insn_cached (rtx_insn *insn)
2218 {
2219   extract_insn_cached (insn);
2220   if (which_alternative == -1
2221       && !constrain_operands (reload_completed,
2222 			      get_enabled_alternatives (insn)))
2223     fatal_insn_not_found (insn);
2224 }
2225 
2226 /* Do cached constrain_operands on INSN, reusing any cached result.  */
2227 int
2228 constrain_operands_cached (rtx_insn *insn, int strict)
2229 {
2230   if (which_alternative == -1)
2231     return constrain_operands (strict, get_enabled_alternatives (insn));
2232   else
2233     return 1;
2234 }
2235 
2236 /* Analyze INSN and fill in recog_data.  */
2237 
2238 void
2239 extract_insn (rtx_insn *insn)
2240 {
2241   int i;
2242   int icode;
2243   int noperands;
2244   rtx body = PATTERN (insn);
2245 
2246   recog_data.n_operands = 0;
2247   recog_data.n_alternatives = 0;
2248   recog_data.n_dups = 0;
2249   recog_data.is_asm = false;
2250 
2251   switch (GET_CODE (body))
2252     {
2253     case USE:
2254     case CLOBBER:
2255     case ASM_INPUT:
2256     case ADDR_VEC:
2257     case ADDR_DIFF_VEC:
2258     case VAR_LOCATION:
2259     case DEBUG_MARKER:
2260       return;
2261 
2262     case SET:
2263       if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2264 	goto asm_insn;
2265       else
2266 	goto normal_insn;
2267     case PARALLEL:
2268       if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2269 	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2270 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2271 	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2272 	goto asm_insn;
2273       else
2274 	goto normal_insn;
2275     case ASM_OPERANDS:
2276     asm_insn:
2277       recog_data.n_operands = noperands = asm_noperands (body);
2278       if (noperands >= 0)
2279 	{
2280 	  /* This insn is an `asm' with operands.  */
2281 
2282 	  /* expand_asm_operands makes sure there aren't too many operands.  */
2283 	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2284 
2285 	  /* Now get the operand values and constraints out of the insn.  */
2286 	  decode_asm_operands (body, recog_data.operand,
2287 			       recog_data.operand_loc,
2288 			       recog_data.constraints,
2289 			       recog_data.operand_mode, NULL);
2290 	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2291 	  if (noperands > 0)
2292 	    {
2293 	      const char *p = recog_data.constraints[0];
2294 	      recog_data.n_alternatives = 1;
2295 	      while (*p)
2296 		recog_data.n_alternatives += (*p++ == ',');
2297 	    }
2298 	  recog_data.is_asm = true;
2299 	  break;
2300 	}
2301       fatal_insn_not_found (insn);
2302 
2303     default:
2304     normal_insn:
2305       /* Ordinary insn: recognize it, get the operands via insn_extract
2306 	 and get the constraints.  */
2307 
2308       icode = recog_memoized (insn);
2309       if (icode < 0)
2310 	fatal_insn_not_found (insn);
2311 
2312       recog_data.n_operands = noperands = insn_data[icode].n_operands;
2313       recog_data.n_alternatives = insn_data[icode].n_alternatives;
2314       recog_data.n_dups = insn_data[icode].n_dups;
2315 
2316       insn_extract (insn);
2317 
2318       for (i = 0; i < noperands; i++)
2319 	{
2320 	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2321 	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2322 	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2323 	  /* VOIDmode match_operands get their mode from the real operand.  */
2324 	  if (recog_data.operand_mode[i] == VOIDmode)
2325 	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2326 	}
2327     }
2328   for (i = 0; i < noperands; i++)
2329     recog_data.operand_type[i]
2330       = (recog_data.constraints[i][0] == '=' ? OP_OUT
2331 	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2332 	 : OP_IN);
2333 
2334   gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2335 
2336   recog_data.insn = NULL;
2337   which_alternative = -1;
2338 }
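
/* Typical use (editorial sketch; note_output is a hypothetical
   helper):

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] == OP_OUT)
	 note_output (recog_data.operand[i]);

   After the call, recog_data describes the operands, constraints and
   modes of INSN, and which_alternative stays -1 until
   constrain_operands picks an alternative.  */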
2339 
2340 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2341    operands, N_ALTERNATIVES alternatives and constraint strings
2342    CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2343    and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
2344    if the insn is an asm statement and preprocessing should take the
2345    asm operands into account, e.g. to determine whether they could be
2346    addresses in constraints that require addresses; it should then
2347    point to an array of pointers to each operand.  */
2348 
2349 void
2350 preprocess_constraints (int n_operands, int n_alternatives,
2351 			const char **constraints,
2352 			operand_alternative *op_alt_base,
2353 			rtx **oploc)
2354 {
2355   for (int i = 0; i < n_operands; i++)
2356     {
2357       int j;
2358       struct operand_alternative *op_alt;
2359       const char *p = constraints[i];
2360 
2361       op_alt = op_alt_base;
2362 
2363       for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2364 	{
2365 	  op_alt[i].cl = NO_REGS;
2366 	  op_alt[i].constraint = p;
2367 	  op_alt[i].matches = -1;
2368 	  op_alt[i].matched = -1;
2369 
2370 	  if (*p == '\0' || *p == ',')
2371 	    {
2372 	      op_alt[i].anything_ok = 1;
2373 	      continue;
2374 	    }
2375 
2376 	  for (;;)
2377 	    {
2378 	      char c = *p;
2379 	      if (c == '#')
2380 		do
2381 		  c = *++p;
2382 		while (c != ',' && c != '\0');
2383 	      if (c == ',' || c == '\0')
2384 		{
2385 		  p++;
2386 		  break;
2387 		}
2388 
2389 	      switch (c)
2390 		{
2391 		case '?':
2392 		  op_alt[i].reject += 6;
2393 		  break;
2394 		case '!':
2395 		  op_alt[i].reject += 600;
2396 		  break;
2397 		case '&':
2398 		  op_alt[i].earlyclobber = 1;
2399 		  break;
2400 
2401 		case '0': case '1': case '2': case '3': case '4':
2402 		case '5': case '6': case '7': case '8': case '9':
2403 		  {
2404 		    char *end;
2405 		    op_alt[i].matches = strtoul (p, &end, 10);
2406 		    op_alt[op_alt[i].matches].matched = i;
2407 		    p = end;
2408 		  }
2409 		  continue;
2410 
2411 		case 'X':
2412 		  op_alt[i].anything_ok = 1;
2413 		  break;
2414 
2415 		case 'g':
2416 		  op_alt[i].cl =
2417 		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2418 		  break;
2419 
2420 		default:
2421 		  enum constraint_num cn = lookup_constraint (p);
2422 		  enum reg_class cl;
2423 		  switch (get_constraint_type (cn))
2424 		    {
2425 		    case CT_REGISTER:
2426 		      cl = reg_class_for_constraint (cn);
2427 		      if (cl != NO_REGS)
2428 			op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2429 		      break;
2430 
2431 		    case CT_CONST_INT:
2432 		      break;
2433 
2434 		    case CT_MEMORY:
2435 		    case CT_SPECIAL_MEMORY:
2436 		      op_alt[i].memory_ok = 1;
2437 		      break;
2438 
2439 		    case CT_ADDRESS:
2440 		      if (oploc && !address_operand (*oploc[i], VOIDmode))
2441 			break;
2442 
2443 		      op_alt[i].is_address = 1;
2444 		      op_alt[i].cl
2445 			= (reg_class_subunion
2446 			   [(int) op_alt[i].cl]
2447 			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2448 						  ADDRESS, SCRATCH)]);
2449 		      break;
2450 
2451 		    case CT_FIXED_FORM:
2452 		      break;
2453 		    }
2454 		  break;
2455 		}
2456 	      p += CONSTRAINT_LEN (c, p);
2457 	    }
2458 	}
2459     }
2460 }
2461 
2462 /* Return an array of operand_alternative structures for
2463    instruction ICODE.  */
2464 
2465 const operand_alternative *
2466 preprocess_insn_constraints (unsigned int icode)
2467 {
2468   gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2469   if (this_target_recog->x_op_alt[icode])
2470     return this_target_recog->x_op_alt[icode];
2471 
2472   int n_operands = insn_data[icode].n_operands;
2473   if (n_operands == 0)
2474     return 0;
2475   /* Always provide at least one alternative so that which_op_alt ()
2476      works correctly.  If the instruction has 0 alternatives (i.e. all
2477      constraint strings are empty) then each operand in this alternative
2478      will have anything_ok set.  */
2479   int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2480   int n_entries = n_operands * n_alternatives;
2481 
2482   operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2483   const char **constraints = XALLOCAVEC (const char *, n_operands);
2484 
2485   for (int i = 0; i < n_operands; ++i)
2486     constraints[i] = insn_data[icode].operand[i].constraint;
2487   preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2488 			  NULL);
2489 
2490   this_target_recog->x_op_alt[icode] = op_alt;
2491   return op_alt;
2492 }
2493 
2494 /* After calling extract_insn, you can use this function to extract some
2495    information from the constraint strings into a more usable form.
2496    The collected data is stored in recog_op_alt.  */
2497 
2498 void
2499 preprocess_constraints (rtx_insn *insn)
2500 {
2501   int icode = INSN_CODE (insn);
2502   if (icode >= 0)
2503     recog_op_alt = preprocess_insn_constraints (icode);
2504   else
2505     {
2506       int n_operands = recog_data.n_operands;
2507       int n_alternatives = recog_data.n_alternatives;
2508       int n_entries = n_operands * n_alternatives;
2509       memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2510       preprocess_constraints (n_operands, n_alternatives,
2511 			      recog_data.constraints, asm_op_alt,
2512 			      NULL);
2513       recog_op_alt = asm_op_alt;
2514     }
2515 }
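
/* Usage sketch (editorial): after extract_insn, a pass can do

     preprocess_constraints (insn);
     const operand_alternative *op_alt = which_op_alt ();

   where which_op_alt (declared in recog.h) returns the row of
   recog_op_alt for the currently chosen alternative.  */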
2516 
2517 /* Check the operands of an insn against the insn's operand constraints
2518    and return 1 if they match any of the alternatives in ALTERNATIVES.
2519 
2520    The information about the insn's operands, constraints, operand modes
2521    etc. is obtained from the global variables set up by extract_insn.
2522 
2523    WHICH_ALTERNATIVE is set to a number which indicates which
2524    alternative of constraints was matched: 0 for the first alternative,
2525    1 for the next, etc.
2526 
2527    In addition, when two operands are required to match
2528    and it happens that the output operand is (reg) while the
2529    input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2530    make the output operand look like the input.
2531    This is because the output operand is the one the template will print.
2532 
2533    This is used in final, just before printing the assembler code and by
2534    the routines that determine an insn's attribute.
2535 
2536    If STRICT is positive, it means that we have been
2537    called after reload has been completed.  In that case, we must
2538    do all checks strictly.  If it is zero, it means that we have been called
2539    before reload has completed.  In that case, we first try to see if we can
2540    find an alternative that matches strictly.  If not, we try again, this
2541    time assuming that reload will fix up the insn.  This provides a "best
2542    guess" for the alternative and is used to compute attributes of insns prior
2543    to reload.  A negative value of STRICT is used for this internal call.  */
2544 
2545 struct funny_match
2546 {
2547   int this_op, other;
2548 };
2549 
2550 int
2551 constrain_operands (int strict, alternative_mask alternatives)
2552 {
2553   const char *constraints[MAX_RECOG_OPERANDS];
2554   int matching_operands[MAX_RECOG_OPERANDS];
2555   int earlyclobber[MAX_RECOG_OPERANDS];
2556   int c;
2557 
2558   struct funny_match funny_match[MAX_RECOG_OPERANDS];
2559   int funny_match_index;
2560 
2561   which_alternative = 0;
2562   if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2563     return 1;
2564 
2565   for (c = 0; c < recog_data.n_operands; c++)
2566     {
2567       constraints[c] = recog_data.constraints[c];
2568       matching_operands[c] = -1;
2569     }
2570 
2571   do
2572     {
2573       int seen_earlyclobber_at = -1;
2574       int opno;
2575       int lose = 0;
2576       funny_match_index = 0;
2577 
2578       if (!TEST_BIT (alternatives, which_alternative))
2579 	{
2580 	  int i;
2581 
2582 	  for (i = 0; i < recog_data.n_operands; i++)
2583 	    constraints[i] = skip_alternative (constraints[i]);
2584 
2585 	  which_alternative++;
2586 	  continue;
2587 	}
2588 
2589       for (opno = 0; opno < recog_data.n_operands; opno++)
2590 	{
2591 	  rtx op = recog_data.operand[opno];
2592 	  machine_mode mode = GET_MODE (op);
2593 	  const char *p = constraints[opno];
2594 	  int offset = 0;
2595 	  int win = 0;
2596 	  int val;
2597 	  int len;
2598 
2599 	  earlyclobber[opno] = 0;
2600 
2601 	  /* A unary operator may be accepted by the predicate, but it
2602 	     is irrelevant for matching constraints.  */
2603 	  if (UNARY_P (op))
2604 	    op = XEXP (op, 0);
2605 
2606 	  if (GET_CODE (op) == SUBREG)
2607 	    {
2608 	      if (REG_P (SUBREG_REG (op))
2609 		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2610 		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2611 					      GET_MODE (SUBREG_REG (op)),
2612 					      SUBREG_BYTE (op),
2613 					      GET_MODE (op));
2614 	      op = SUBREG_REG (op);
2615 	    }
2616 
2617 	  /* An empty constraint or empty alternative
2618 	     allows anything which matched the pattern.  */
2619 	  if (*p == 0 || *p == ',')
2620 	    win = 1;
2621 
2622 	  do
2623 	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2624 	      {
2625 	      case '\0':
2626 		len = 0;
2627 		break;
2628 	      case ',':
2629 		c = '\0';
2630 		break;
2631 
2632 	      case '#':
2633 		/* Ignore rest of this alternative as far as
2634 		   constraint checking is concerned.  */
2635 		do
2636 		  p++;
2637 		while (*p && *p != ',');
2638 		len = 0;
2639 		break;
2640 
2641 	      case '&':
2642 		earlyclobber[opno] = 1;
2643 		if (seen_earlyclobber_at < 0)
2644 		  seen_earlyclobber_at = opno;
2645 		break;
2646 
2647 	      case '0':  case '1':  case '2':  case '3':  case '4':
2648 	      case '5':  case '6':  case '7':  case '8':  case '9':
2649 		{
2650 		  /* This operand must be the same as a previous one.
2651 		     This kind of constraint is used for instructions such
2652 		     as add when they take only two operands.
2653 
2654 		     Note that the lower-numbered operand is passed first.
2655 
2656 		     If we are not testing strictly, assume that this
2657 		     constraint will be satisfied.  */
2658 
2659 		  char *end;
2660 		  int match;
2661 
2662 		  match = strtoul (p, &end, 10);
2663 		  p = end;
2664 
2665 		  if (strict < 0)
2666 		    val = 1;
2667 		  else
2668 		    {
2669 		      rtx op1 = recog_data.operand[match];
2670 		      rtx op2 = recog_data.operand[opno];
2671 
2672 		      /* A unary operator may be accepted by the predicate,
2673 			 but it is irrelevant for matching constraints.  */
2674 		      if (UNARY_P (op1))
2675 			op1 = XEXP (op1, 0);
2676 		      if (UNARY_P (op2))
2677 			op2 = XEXP (op2, 0);
2678 
2679 		      val = operands_match_p (op1, op2);
2680 		    }
2681 
2682 		  matching_operands[opno] = match;
2683 		  matching_operands[match] = opno;
2684 
2685 		  if (val != 0)
2686 		    win = 1;
2687 
2688 		  /* If output is *x and input is *--x, arrange later
2689 		     to change the output to *--x as well, since the
2690 		     output op is the one that will be printed.  */
2691 		  if (val == 2 && strict > 0)
2692 		    {
2693 		      funny_match[funny_match_index].this_op = opno;
2694 		      funny_match[funny_match_index++].other = match;
2695 		    }
2696 		}
2697 		len = 0;
2698 		break;
2699 
2700 	      case 'p':
2701 		/* p is used for address_operands.  When we are called by
2702 		   gen_reload, no one will have checked that the address is
2703 		   strictly valid, i.e., that all pseudos requiring hard regs
2704 		   have gotten them.  We also want to make sure we have a
2705 		   valid mode.  */
2706 		if ((GET_MODE (op) == VOIDmode
2707 		     || SCALAR_INT_MODE_P (GET_MODE (op)))
2708 		    && (strict <= 0
2709 			|| (strict_memory_address_p
2710 			     (recog_data.operand_mode[opno], op))))
2711 		  win = 1;
2712 		break;
2713 
2714 		/* No need to check general_operand again;
2715 		   it was done in insn-recog.c.  Well, except that reload
2716 		   doesn't check the validity of its replacements, but
2717 		   that should only matter when there's a bug.  */
2718 	      case 'g':
2719 		/* Anything goes unless it is a REG and really has a hard reg
2720 		   but the hard reg is not in the class GENERAL_REGS.  */
2721 		if (REG_P (op))
2722 		  {
2723 		    if (strict < 0
2724 			|| GENERAL_REGS == ALL_REGS
2725 			|| (reload_in_progress
2726 			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2727 			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2728 		      win = 1;
2729 		  }
2730 		else if (strict < 0 || general_operand (op, mode))
2731 		  win = 1;
2732 		break;
2733 
2734 	      default:
2735 		{
2736 		  enum constraint_num cn = lookup_constraint (p);
2737 		  enum reg_class cl = reg_class_for_constraint (cn);
2738 		  if (cl != NO_REGS)
2739 		    {
2740 		      if (strict < 0
2741 			  || (strict == 0
2742 			      && REG_P (op)
2743 			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2744 			  || (strict == 0 && GET_CODE (op) == SCRATCH)
2745 			  || (REG_P (op)
2746 			      && reg_fits_class_p (op, cl, offset, mode)))
2747 		        win = 1;
2748 		    }
2749 
2750 		  else if (constraint_satisfied_p (op, cn))
2751 		    win = 1;
2752 
2753 		  else if (insn_extra_memory_constraint (cn)
2754 			   /* Every memory operand can be reloaded to fit.  */
2755 			   && ((strict < 0 && MEM_P (op))
2756 			       /* Before reload, accept what reload can turn
2757 				  into a mem.  */
2758 			       || (strict < 0 && CONSTANT_P (op))
2759 			       /* Before reload, accept a pseudo,
2760 				  since LRA can turn it into a mem.  */
2761 			       || (strict < 0 && targetm.lra_p () && REG_P (op)
2762 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2763 			       /* During reload, accept a pseudo.  */
2764 			       || (reload_in_progress && REG_P (op)
2765 				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2766 		    win = 1;
2767 		  else if (insn_extra_address_constraint (cn)
2768 			   /* Every address operand can be reloaded to fit.  */
2769 			   && strict < 0)
2770 		    win = 1;
2771 		  /* Cater to architectures like IA-64 that define extra memory
2772 		     constraints without using define_memory_constraint.  */
2773 		  else if (reload_in_progress
2774 			   && REG_P (op)
2775 			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
2776 			   && reg_renumber[REGNO (op)] < 0
2777 			   && reg_equiv_mem (REGNO (op)) != 0
2778 			   && constraint_satisfied_p
2779 			      (reg_equiv_mem (REGNO (op)), cn))
2780 		    win = 1;
2781 		  break;
2782 		}
2783 	      }
2784 	  while (p += len, c);
2785 
2786 	  constraints[opno] = p;
2787 	  /* If this operand did not win somehow,
2788 	     this alternative loses.  */
2789 	  if (! win)
2790 	    lose = 1;
2791 	}
2792       /* This alternative won; the operands are ok.
2793 	 Change whichever operands this alternative says to change.  */
2794       if (! lose)
2795 	{
2796 	  int opno, eopno;
2797 
2798 	  /* See if any earlyclobber operand conflicts with some other
2799 	     operand.  */
2800 
2801 	  if (strict > 0 && seen_earlyclobber_at >= 0)
2802 	    for (eopno = seen_earlyclobber_at;
2803 		 eopno < recog_data.n_operands;
2804 		 eopno++)
2805 	      /* Ignore earlyclobber operands now in memory,
2806 		 because we would often report failure when we have
2807 		 two memory operands, one of which was formerly a REG.  */
2808 	      if (earlyclobber[eopno]
2809 		  && REG_P (recog_data.operand[eopno]))
2810 		for (opno = 0; opno < recog_data.n_operands; opno++)
2811 		  if ((MEM_P (recog_data.operand[opno])
2812 		       || recog_data.operand_type[opno] != OP_OUT)
2813 		      && opno != eopno
2814 		      /* Ignore things like match_operator operands.  */
2815 		      && *recog_data.constraints[opno] != 0
2816 		      && ! (matching_operands[opno] == eopno
2817 			    && operands_match_p (recog_data.operand[opno],
2818 						 recog_data.operand[eopno]))
2819 		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2820 						   recog_data.operand[eopno]))
2821 		    lose = 1;
2822 
2823 	  if (! lose)
2824 	    {
2825 	      while (--funny_match_index >= 0)
2826 		{
2827 		  recog_data.operand[funny_match[funny_match_index].other]
2828 		    = recog_data.operand[funny_match[funny_match_index].this_op];
2829 		}
2830 
2831 	      /* For operands without < or > constraints, reject side-effects.  */
2832 	      if (AUTO_INC_DEC && recog_data.is_asm)
2833 		{
2834 		  for (opno = 0; opno < recog_data.n_operands; opno++)
2835 		    if (MEM_P (recog_data.operand[opno]))
2836 		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2837 			{
2838 			case PRE_INC:
2839 			case POST_INC:
2840 			case PRE_DEC:
2841 			case POST_DEC:
2842 			case PRE_MODIFY:
2843 			case POST_MODIFY:
2844 			  if (strchr (recog_data.constraints[opno], '<') == NULL
2845 			      && strchr (recog_data.constraints[opno], '>')
2846 				 == NULL)
2847 			    return 0;
2848 			  break;
2849 			default:
2850 			  break;
2851 			}
2852 		}
2853 
2854 	      return 1;
2855 	    }
2856 	}
2857 
2858       which_alternative++;
2859     }
2860   while (which_alternative < recog_data.n_alternatives);
2861 
2862   which_alternative = -1;
2863   /* If we are about to reject this, but we are not to test strictly,
2864      try a very loose test.  Only return failure if it fails also.  */
2865   if (strict == 0)
2866     return constrain_operands (-1, alternatives);
2867   else
2868     return 0;
2869 }
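
/* Usage sketch (editorial): final and the attribute routines pair
   extract_insn with this function, e.g.

     extract_insn (insn);
     if (constrain_operands (reload_completed,
			     get_enabled_alternatives (insn)))
       ...which_alternative identifies the matched alternative...

   which is exactly what extract_constrain_insn above does, except
   that it treats failure as a fatal error.  */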
2870 
2871 /* Return true iff OPERAND (assumed to be a REG rtx)
2872    is a hard reg in class CLASS when its regno is offset by OFFSET
2873    and changed to mode MODE.
2874    If REG occupies multiple hard regs, all of them must be in CLASS.  */
2875 
2876 bool
2877 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2878 		  machine_mode mode)
2879 {
2880   unsigned int regno = REGNO (operand);
2881 
2882   if (cl == NO_REGS)
2883     return false;
2884 
2885   /* Regno must not be a pseudo register.  Offset may be negative.  */
2886   return (HARD_REGISTER_NUM_P (regno)
2887 	  && HARD_REGISTER_NUM_P (regno + offset)
2888 	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2889 				regno + offset));
2890 }
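
/* Editorial example: for a DImode value starting in a 32-bit hard
   register R, both R and R + 1 must belong to CL; in_hard_reg_set_p
   performs that multi-register check using MODE.  */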
2891 
2892 /* Split single instruction.  Helper function for split_all_insns and
2893    split_all_insns_noflow.  Return last insn in the sequence if successful,
2894    or NULL if unsuccessful.  */
2895 
2896 static rtx_insn *
2897 split_insn (rtx_insn *insn)
2898 {
2899   /* Split insns here to get max fine-grain parallelism.  */
2900   rtx_insn *first = PREV_INSN (insn);
2901   rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2902   rtx insn_set, last_set, note;
2903 
2904   if (last == insn)
2905     return NULL;
2906 
2907   /* If the original instruction was a single set that was known to be
2908      equivalent to a constant, see if we can say the same about the last
2909      instruction in the split sequence.  The two instructions must set
2910      the same destination.  */
2911   insn_set = single_set (insn);
2912   if (insn_set)
2913     {
2914       last_set = single_set (last);
2915       if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2916 	{
2917 	  note = find_reg_equal_equiv_note (insn);
2918 	  if (note && CONSTANT_P (XEXP (note, 0)))
2919 	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2920 	  else if (CONSTANT_P (SET_SRC (insn_set)))
2921 	    set_unique_reg_note (last, REG_EQUAL,
2922 				 copy_rtx (SET_SRC (insn_set)));
2923 	}
2924     }
2925 
2926   /* try_split returns the NOTE that INSN became.  */
2927   SET_INSN_DELETED (insn);
2928 
2929   /* ??? Coddle to md files that generate subregs in post-reload
2930      splitters instead of computing the proper hard register.  */
2931   if (reload_completed && first != last)
2932     {
2933       first = NEXT_INSN (first);
2934       for (;;)
2935 	{
2936 	  if (INSN_P (first))
2937 	    cleanup_subreg_operands (first);
2938 	  if (first == last)
2939 	    break;
2940 	  first = NEXT_INSN (first);
2941 	}
2942     }
2943 
2944   return last;
2945 }
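
/* For example (editorial note): if INSN was
   (set (reg:SI 100) (const_int 0x12345)) with the constant known to be
   its value, and the target splits it into a high-part/low-part pair,
   the code above re-attaches the constant to the last insn of the
   split sequence as a REG_EQUAL note.  */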
2946 
2947 /* Split all insns in the function.  */
2948 
2949 void
2950 split_all_insns (void)
2951 {
2952   bool changed;
2953   bool need_cfg_cleanup = false;
2954   basic_block bb;
2955 
2956   auto_sbitmap blocks (last_basic_block_for_fn (cfun));
2957   bitmap_clear (blocks);
2958   changed = false;
2959 
2960   FOR_EACH_BB_REVERSE_FN (bb, cfun)
2961     {
2962       rtx_insn *insn, *next;
2963       bool finish = false;
2964 
2965       rtl_profile_for_bb (bb);
2966       for (insn = BB_HEAD (bb); !finish ; insn = next)
2967 	{
2968 	  /* Can't use `next_real_insn' because that might go across
2969 	     CODE_LABELS and short-out basic blocks.  */
2970 	  next = NEXT_INSN (insn);
2971 	  finish = (insn == BB_END (bb));
2972 
2973 	  /* If INSN has a REG_EH_REGION note and we split INSN, the
2974 	     resulting split may not have/need REG_EH_REGION notes.
2975 
2976 	     If that happens and INSN was the last reference to the
2977 	     given EH region, then the EH region will become unreachable.
2978 	     We cannot leave the unreachable blocks in the CFG as that
2979 	     will trigger a checking failure.
2980 
2981 	     So track if INSN has a REG_EH_REGION note.  If so and we
2982 	     split INSN, then trigger a CFG cleanup.  */
2983 	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2984 	  if (INSN_P (insn))
2985 	    {
2986 	      rtx set = single_set (insn);
2987 
2988 	      /* Don't split no-op move insns.  These should silently
2989 		 disappear later in final.  Splitting such insns would
2990 		 break the code that handles LIBCALL blocks.  */
2991 	      if (set && set_noop_p (set))
2992 		{
2993 		  /* Nops get in the way while scheduling, so delete them
2994 		     now if register allocation has already been done.  It
2995 		     is too risky to try to do this before register
2996 		     allocation, and there are unlikely to be very many
2997 		     nops then anyways.  */
2998 		  if (reload_completed)
2999 		    delete_insn_and_edges (insn);
3000 		  if (note)
3001 		    need_cfg_cleanup = true;
3002 		}
3003 	      else
3004 		{
3005 		  if (split_insn (insn))
3006 		    {
3007 		      bitmap_set_bit (blocks, bb->index);
3008 		      changed = true;
3009 		      if (note)
3010 			need_cfg_cleanup = true;
3011 		    }
3012 		}
3013 	    }
3014 	}
3015     }
3016 
3017   default_rtl_profile ();
3018   if (changed)
3019     {
3020       find_many_sub_basic_blocks (blocks);
3021 
3022       /* Splitting could drop an REG_EH_REGION if it potentially
3023 	 trapped in its original form, but does not in its split
3024 	 form.  Consider a FLOAT_TRUNCATE which splits into a memory
3025 	 store/load pair and -fnon-call-exceptions.  */
3026       if (need_cfg_cleanup)
3027 	cleanup_cfg (0);
3028     }
3029 
3030   checking_verify_flow_info ();
3031 }
3032 
3033 /* Same as split_all_insns, but do not expect CFG to be available.
3034    Used by machine dependent reorg passes.  */
3035 
3036 unsigned int
3037 split_all_insns_noflow (void)
3038 {
3039   rtx_insn *next, *insn;
3040 
3041   for (insn = get_insns (); insn; insn = next)
3042     {
3043       next = NEXT_INSN (insn);
3044       if (INSN_P (insn))
3045 	{
3046 	  /* Don't split no-op move insns.  These should silently
3047 	     disappear later in final.  Splitting such insns would
3048 	     break the code that handles LIBCALL blocks.  */
3049 	  rtx set = single_set (insn);
3050 	  if (set && set_noop_p (set))
3051 	    {
3052 	      /* Nops get in the way while scheduling, so delete them
3053 		 now if register allocation has already been done.  It
3054 		 is too risky to try to do this before register
3055 		 allocation, and there are unlikely to be very many
3056 		 nops then anyways.
3057 
3058 		 ??? Should we use delete_insn when the CFG isn't valid?  */
3059 	      if (reload_completed)
3060 		delete_insn_and_edges (insn);
3061 	    }
3062 	  else
3063 	    split_insn (insn);
3064 	}
3065     }
3066   return 0;
3067 }
3068 
3069 struct peep2_insn_data
3070 {
3071   rtx_insn *insn;
3072   regset live_before;
3073 };
3074 
3075 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3076 static int peep2_current;
3077 
3078 static bool peep2_do_rebuild_jump_labels;
3079 static bool peep2_do_cleanup_cfg;
3080 
3081 /* The number of instructions available to match a peep2.  */
3082 int peep2_current_count;
3083 
3084 /* A marker indicating the last insn of the block.  The live_before regset
3085    for this element is correct, indicating DF_LIVE_OUT for the block.  */
3086 #define PEEP2_EOB invalid_insn_rtx
3087 
3088 /* Wrap N to fit into the peep2_insn_data buffer.  */
3089 
3090 static int
3091 peep2_buf_position (int n)
3092 {
3093   if (n >= MAX_INSNS_PER_PEEP2 + 1)
3094     n -= MAX_INSNS_PER_PEEP2 + 1;
3095   return n;
3096 }
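
/* E.g. if MAX_INSNS_PER_PEEP2 is 5, the buffer has six slots and
   peep2_buf_position (5 + 3) yields 2: indices wrap around, making
   peep2_insn_data a circular buffer over the most recent insns.  */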
3097 
3098 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3099    does not exist.  Used by the recognizer to find the next insn to match
3100    in a multi-insn pattern.  */
3101 
3102 rtx_insn *
3103 peep2_next_insn (int n)
3104 {
3105   gcc_assert (n <= peep2_current_count);
3106 
3107   n = peep2_buf_position (peep2_current + n);
3108 
3109   return peep2_insn_data[n].insn;
3110 }
3111 
3112 /* Return true if REGNO is dead before the Nth non-note insn
3113    after `current'.  */
3114 
3115 int
3116 peep2_regno_dead_p (int ofs, int regno)
3117 {
3118   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3119 
3120   ofs = peep2_buf_position (peep2_current + ofs);
3121 
3122   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3123 
3124   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3125 }
3126 
3127 /* Similarly for a REG.  */
3128 
3129 int
3130 peep2_reg_dead_p (int ofs, rtx reg)
3131 {
3132   gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3133 
3134   ofs = peep2_buf_position (peep2_current + ofs);
3135 
3136   gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3137 
3138   unsigned int end_regno = END_REGNO (reg);
3139   for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3140     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3141       return 0;
3142   return 1;
3143 }
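
/* Peephole conditions in the md files typically use this as, e.g.,
   "peep2_reg_dead_p (2, operands[0])" to require that operand 0 is
   dead after the two matched insns (an editorial illustration).  */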
3144 
3145 /* Regno offset to be used in the register search.  */
3146 static int search_ofs;
3147 
3148 /* Try to find a hard register of mode MODE, matching the register class in
3149    CLASS_STR, which is available at the beginning of the FROM'th insn of the
3150    current peephole window and is neither set nor clobbered by the insns
3151    strictly before the TO'th insn; FROM and TO are offsets from the current
3152    peep2 position, as for peep2_next_insn.
3153    Registers that already have bits set in REG_SET will not be considered.
3154 
3155    If an appropriate register is available, it will be returned and the
3156    corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3157    returned.  */
3158 
3159 rtx
3160 peep2_find_free_register (int from, int to, const char *class_str,
3161 			  machine_mode mode, HARD_REG_SET *reg_set)
3162 {
3163   enum reg_class cl;
3164   HARD_REG_SET live;
3165   df_ref def;
3166   int i;
3167 
3168   gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3169   gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3170 
3171   from = peep2_buf_position (peep2_current + from);
3172   to = peep2_buf_position (peep2_current + to);
3173 
3174   gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3175   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3176 
3177   while (from != to)
3178     {
3179       gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3180 
3181       /* Don't use registers set or clobbered by the insn.  */
3182       FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3183 	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3184 
3185       from = peep2_buf_position (from + 1);
3186     }
3187 
3188   cl = reg_class_for_constraint (lookup_constraint (class_str));
3189 
3190   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3191     {
3192       int raw_regno, regno, success, j;
3193 
3194       /* Distribute the free registers as much as possible.  */
3195       raw_regno = search_ofs + i;
3196       if (raw_regno >= FIRST_PSEUDO_REGISTER)
3197 	raw_regno -= FIRST_PSEUDO_REGISTER;
3198 #ifdef REG_ALLOC_ORDER
3199       regno = reg_alloc_order[raw_regno];
3200 #else
3201       regno = raw_regno;
3202 #endif
3203 
3204       /* Can it support the mode we need?  */
3205       if (!targetm.hard_regno_mode_ok (regno, mode))
3206 	continue;
3207 
3208       success = 1;
3209       for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3210 	{
3211 	  /* Don't allocate fixed registers.  */
3212 	  if (fixed_regs[regno + j])
3213 	    {
3214 	      success = 0;
3215 	      break;
3216 	    }
3217 	  /* Don't allocate global registers.  */
3218 	  if (global_regs[regno + j])
3219 	    {
3220 	      success = 0;
3221 	      break;
3222 	    }
3223 	  /* Make sure the register is of the right class.  */
3224 	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3225 	    {
3226 	      success = 0;
3227 	      break;
3228 	    }
3229 	  /* And that we don't create an extra save/restore.  */
3230 	  if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3231 	    {
3232 	      success = 0;
3233 	      break;
3234 	    }
3235 
3236 	  if (! targetm.hard_regno_scratch_ok (regno + j))
3237 	    {
3238 	      success = 0;
3239 	      break;
3240 	    }
3241 
3242 	  /* And we don't clobber traceback for noreturn functions.  */
3243 	  if ((regno + j == FRAME_POINTER_REGNUM
3244 	       || regno + j == HARD_FRAME_POINTER_REGNUM)
3245 	      && (! reload_completed || frame_pointer_needed))
3246 	    {
3247 	      success = 0;
3248 	      break;
3249 	    }
3250 
3251 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3252 	      || TEST_HARD_REG_BIT (live, regno + j))
3253 	    {
3254 	      success = 0;
3255 	      break;
3256 	    }
3257 	}
3258 
3259       if (success)
3260 	{
3261 	  add_to_hard_reg_set (reg_set, mode, regno);
3262 
3263 	  /* Start the next search with the next register.  */
3264 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3265 	    raw_regno = 0;
3266 	  search_ofs = raw_regno;
3267 
3268 	  return gen_rtx_REG (mode, regno);
3269 	}
3270     }
3271 
3272   search_ofs = 0;
3273   return NULL_RTX;
3274 }
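
/* Usage sketch (editorial; mirrors how code generated for
   define_peephole2 patterns calls this function):

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       ...no scratch register is free; the peephole must fail...  */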
3275 
3276 /* Forget all currently tracked instructions; remember only the current
3277    LIVE regset.  */
3278 
3279 static void
3280 peep2_reinit_state (regset live)
3281 {
3282   int i;
3283 
3284   /* Indicate that all slots except the last hold invalid data.  */
3285   for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3286     peep2_insn_data[i].insn = NULL;
3287   peep2_current_count = 0;
3288 
3289   /* Indicate that the last slot contains live_after data.  */
3290   peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3291   peep2_current = MAX_INSNS_PER_PEEP2;
3292 
3293   COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3294 }
3295 
3296 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3297    starting at INSN.  Perform the replacement, removing the old insns and
3298    replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
3299    if the replacement is rejected.  */
3300 
3301 static rtx_insn *
3302 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3303 {
3304   int i;
3305   rtx_insn *last, *before_try, *x;
3306   rtx eh_note, as_note;
3307   rtx_insn *old_insn;
3308   rtx_insn *new_insn;
3309   bool was_call = false;
3310 
3311   /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3312      match more than one insn, or to be split into more than one insn.  */
3313   old_insn = peep2_insn_data[peep2_current].insn;
3314   if (RTX_FRAME_RELATED_P (old_insn))
3315     {
3316       bool any_note = false;
3317       rtx note;
3318 
3319       if (match_len != 0)
3320 	return NULL;
3321 
3322       /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
3323 	 may be in the stream for the purpose of register allocation.  */
3324       if (active_insn_p (attempt))
3325 	new_insn = attempt;
3326       else
3327 	new_insn = next_active_insn (attempt);
3328       if (next_active_insn (new_insn))
3329 	return NULL;
3330 
3331       /* We have a 1-1 replacement.  Copy over any frame-related info.  */
3332       RTX_FRAME_RELATED_P (new_insn) = 1;
3333 
3334       /* Allow the backend to fill in a note during the split.  */
3335       for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3336 	switch (REG_NOTE_KIND (note))
3337 	  {
3338 	  case REG_FRAME_RELATED_EXPR:
3339 	  case REG_CFA_DEF_CFA:
3340 	  case REG_CFA_ADJUST_CFA:
3341 	  case REG_CFA_OFFSET:
3342 	  case REG_CFA_REGISTER:
3343 	  case REG_CFA_EXPRESSION:
3344 	  case REG_CFA_RESTORE:
3345 	  case REG_CFA_SET_VDRAP:
3346 	    any_note = true;
3347 	    break;
3348 	  default:
3349 	    break;
3350 	  }
3351 
3352       /* If the backend didn't supply a note, copy one over.  */
3353       if (!any_note)
3354         for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3355 	  switch (REG_NOTE_KIND (note))
3356 	    {
3357 	    case REG_FRAME_RELATED_EXPR:
3358 	    case REG_CFA_DEF_CFA:
3359 	    case REG_CFA_ADJUST_CFA:
3360 	    case REG_CFA_OFFSET:
3361 	    case REG_CFA_REGISTER:
3362 	    case REG_CFA_EXPRESSION:
3363 	    case REG_CFA_RESTORE:
3364 	    case REG_CFA_SET_VDRAP:
3365 	      add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3366 	      any_note = true;
3367 	      break;
3368 	    default:
3369 	      break;
3370 	    }
3371 
3372       /* If there still isn't a note, make sure the unwind info sees the
3373 	 same expression as before the split.  */
3374       if (!any_note)
3375 	{
3376 	  rtx old_set, new_set;
3377 
3378 	  /* The old insn had better have been simple, or annotated.  */
3379 	  old_set = single_set (old_insn);
3380 	  gcc_assert (old_set != NULL);
3381 
3382 	  new_set = single_set (new_insn);
3383 	  if (!new_set || !rtx_equal_p (new_set, old_set))
3384 	    add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3385 	}
3386 
3387       /* Copy prologue/epilogue status.  This is required in order to keep
3388 	 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
3389       maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3390     }
3391 
3392   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3393      in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3394      cfg-related call notes.  */
3395   for (i = 0; i <= match_len; ++i)
3396     {
3397       int j;
3398       rtx note;
3399 
3400       j = peep2_buf_position (peep2_current + i);
3401       old_insn = peep2_insn_data[j].insn;
3402       if (!CALL_P (old_insn))
3403 	continue;
3404       was_call = true;
3405 
3406       new_insn = attempt;
3407       while (new_insn != NULL_RTX)
3408 	{
3409 	  if (CALL_P (new_insn))
3410 	    break;
3411 	  new_insn = NEXT_INSN (new_insn);
3412 	}
3413 
3414       gcc_assert (new_insn != NULL_RTX);
3415 
3416       CALL_INSN_FUNCTION_USAGE (new_insn)
3417 	= CALL_INSN_FUNCTION_USAGE (old_insn);
3418       SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3419 
3420       for (note = REG_NOTES (old_insn);
3421 	   note;
3422 	   note = XEXP (note, 1))
3423 	switch (REG_NOTE_KIND (note))
3424 	  {
3425 	  case REG_NORETURN:
3426 	  case REG_SETJMP:
3427 	  case REG_TM:
3428 	  case REG_CALL_NOCF_CHECK:
3429 	    add_reg_note (new_insn, REG_NOTE_KIND (note),
3430 			  XEXP (note, 0));
3431 	    break;
3432 	  default:
3433 	    /* Discard all other reg notes.  */
3434 	    break;
3435 	  }
3436 
3437       /* Croak if there is another call in the sequence.  */
3438       while (++i <= match_len)
3439 	{
3440 	  j = peep2_buf_position (peep2_current + i);
3441 	  old_insn = peep2_insn_data[j].insn;
3442 	  gcc_assert (!CALL_P (old_insn));
3443 	}
3444       break;
3445     }
3446 
3447   /* If we matched any instruction that had a REG_ARGS_SIZE, then
3448      move those notes over to the new sequence.  */
3449   as_note = NULL;
3450   for (i = match_len; i >= 0; --i)
3451     {
3452       int j = peep2_buf_position (peep2_current + i);
3453       old_insn = peep2_insn_data[j].insn;
3454 
3455       as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL_RTX);
3456       if (as_note)
3457 	break;
3458     }
3459 
3460   i = peep2_buf_position (peep2_current + match_len);
3461   eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3462 
3463   /* Replace the old sequence with the new.  */
3464   rtx_insn *peepinsn = peep2_insn_data[i].insn;
3465   last = emit_insn_after_setloc (attempt,
3466 				 peep2_insn_data[i].insn,
3467 				 INSN_LOCATION (peepinsn));
3468   if (JUMP_P (peepinsn) && JUMP_P (last))
3469     CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3470   before_try = PREV_INSN (insn);
3471   delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3472 
3473   /* Re-insert the EH_REGION notes.  */
3474   if (eh_note || (was_call && nonlocal_goto_handler_labels))
3475     {
3476       edge eh_edge;
3477       edge_iterator ei;
3478 
3479       FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3480 	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3481 	  break;
3482 
3483       if (eh_note)
3484 	copy_reg_eh_region_note_backward (eh_note, last, before_try);
3485 
3486       if (eh_edge)
3487 	for (x = last; x != before_try; x = PREV_INSN (x))
3488 	  if (x != BB_END (bb)
3489 	      && (can_throw_internal (x)
3490 		  || can_nonlocal_goto (x)))
3491 	    {
3492 	      edge nfte, nehe;
3493 	      int flags;
3494 
3495 	      nfte = split_block (bb, x);
3496 	      flags = (eh_edge->flags
3497 		       & (EDGE_EH | EDGE_ABNORMAL));
3498 	      if (CALL_P (x))
3499 		flags |= EDGE_ABNORMAL_CALL;
3500 	      nehe = make_edge (nfte->src, eh_edge->dest,
3501 				flags);
3502 
3503 	      nehe->probability = eh_edge->probability;
3504 	      nfte->probability = nehe->probability.invert ();
3505 
3506 	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3507 	      bb = nfte->src;
3508 	      eh_edge = nehe;
3509 	    }
3510 
3511       /* The new sequence may have turned a possibly trapping insn
3512 	 into a non-trapping one.  Zap any now-dead outgoing edges.  */
3513       peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3514     }
3515 
3516   /* Re-insert the ARGS_SIZE notes.  */
3517   if (as_note)
3518     fixup_args_size_notes (before_try, last, get_args_size (as_note));
3519 
3520   /* If we generated a jump instruction, it won't have
3521      JUMP_LABEL set.  Recompute after we're done.  */
3522   for (x = last; x != before_try; x = PREV_INSN (x))
3523     if (JUMP_P (x))
3524       {
3525 	peep2_do_rebuild_jump_labels = true;
3526 	break;
3527       }
3528 
3529   return last;
3530 }
3531 
3532 /* After performing a replacement in basic block BB, fix up the life
3533    information in our buffer.  LAST is the last of the insns that we
3534    emitted as a replacement.  PREV is the insn before the start of
3535    the replacement.  MATCH_LEN is one less than the number of instructions
3536    that were matched and that now need to be replaced in the buffer.  */
3537 
3538 static void
3539 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3540 		   rtx_insn *prev)
3541 {
3542   int i = peep2_buf_position (peep2_current + match_len + 1);
3543   rtx_insn *x;
3544   regset_head live;
3545 
3546   INIT_REG_SET (&live);
3547   COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3548 
3549   gcc_assert (peep2_current_count >= match_len + 1);
3550   peep2_current_count -= match_len + 1;
3551 
3552   x = last;
3553   do
3554     {
3555       if (INSN_P (x))
3556 	{
3557 	  df_insn_rescan (x);
3558 	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3559 	    {
3560 	      peep2_current_count++;
3561 	      if (--i < 0)
3562 		i = MAX_INSNS_PER_PEEP2;
3563 	      peep2_insn_data[i].insn = x;
3564 	      df_simulate_one_insn_backwards (bb, x, &live);
3565 	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3566 	    }
3567 	}
3568       x = PREV_INSN (x);
3569     }
3570   while (x != prev);
3571   CLEAR_REG_SET (&live);
3572 
3573   peep2_current = i;
3574 }
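
/* An illustrative note on the index arithmetic above: the insn buffer has
   MAX_INSNS_PER_PEEP2 + 1 slots (the constant is defined earlier in this
   file) and is used as a ring, so peep2_buf_position reduces its argument
   modulo that size.  Assuming MAX_INSNS_PER_PEEP2 is 25, for instance,
   peep2_buf_position (24 + 3) yields 1; the backward walk above performs
   the same wrap-around by hand with "if (--i < 0) i = MAX_INSNS_PER_PEEP2;".  */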
3575 
3576 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3577    Return true if we added it, false otherwise.  The caller will try to match
3578    peepholes against the buffer if we return false; otherwise it will try to
3579    add more instructions to the buffer.  */
3580 
3581 static bool
3582 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3583 {
3584   int pos;
3585 
3586   /* Once we have filled the maximum number of insns the buffer can hold,
3587      allow the caller to match the insns against peepholes.  We wait until
3588      the buffer is full in case the target has similar peepholes of different
3589      length; we always want to match the longest if possible.  */
3590   if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3591     return false;
3592 
3593   /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3594      any other pattern, lest it change the semantics of the frame info.  */
3595   if (RTX_FRAME_RELATED_P (insn))
3596     {
3597       /* Let the buffer drain first.  */
3598       if (peep2_current_count > 0)
3599 	return false;
3600       /* Now the insn will be the only thing in the buffer.  */
3601     }
3602 
3603   pos = peep2_buf_position (peep2_current + peep2_current_count);
3604   peep2_insn_data[pos].insn = insn;
3605   COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3606   peep2_current_count++;
3607 
3608   df_simulate_one_insn_forwards (bb, insn, live);
3609   return true;
3610 }
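
/* To illustrate the RTX_FRAME_RELATED_P case above: a frame-related insn
   is only ever added to an empty buffer, and peep2_attempt additionally
   refuses any multi-insn match that starts at such an insn, so
   frame-related insns are only ever replaced one-for-one.  */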
3611 
3612 /* Perform the peephole2 optimization pass.  */
3613 
3614 static void
3615 peephole2_optimize (void)
3616 {
3617   rtx_insn *insn;
3618   bitmap live;
3619   int i;
3620   basic_block bb;
3621 
3622   peep2_do_cleanup_cfg = false;
3623   peep2_do_rebuild_jump_labels = false;
3624 
3625   df_set_flags (DF_LR_RUN_DCE);
3626   df_note_add_problem ();
3627   df_analyze ();
3628 
3629   /* Initialize the regsets we're going to use.  */
3630   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3631     peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3632   search_ofs = 0;
3633   live = BITMAP_ALLOC (&reg_obstack);
3634 
3635   FOR_EACH_BB_REVERSE_FN (bb, cfun)
3636     {
3637       bool past_end = false;
3638       int pos;
3639 
3640       rtl_profile_for_bb (bb);
3641 
3642       /* Start up propagation.  */
3643       bitmap_copy (live, DF_LR_IN (bb));
3644       df_simulate_initialize_forwards (bb, live);
3645       peep2_reinit_state (live);
3646 
3647       insn = BB_HEAD (bb);
3648       for (;;)
3649 	{
3650 	  rtx_insn *attempt, *head;
3651 	  int match_len;
3652 
3653 	  if (!past_end && !NONDEBUG_INSN_P (insn))
3654 	    {
3655 	    next_insn:
3656 	      insn = NEXT_INSN (insn);
3657 	      if (insn == NEXT_INSN (BB_END (bb)))
3658 		past_end = true;
3659 	      continue;
3660 	    }
3661 	  if (!past_end && peep2_fill_buffer (bb, insn, live))
3662 	    goto next_insn;
3663 
3664 	  /* If the buffer is still empty at this point, we have run off
3665 	     the end of the block.  */
3666 	  if (peep2_current_count == 0)
3667 	    break;
3668 
3669 	  /* The buffer filled to the current maximum, so try to match.  */
3670 
3671 	  pos = peep2_buf_position (peep2_current + peep2_current_count);
3672 	  peep2_insn_data[pos].insn = PEEP2_EOB;
3673 	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3674 
3675 	  /* Match the peephole.  */
3676 	  head = peep2_insn_data[peep2_current].insn;
3677 	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
3678 	  if (attempt != NULL)
3679 	    {
3680 	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3681 	      if (last)
3682 		{
3683 		  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3684 		  continue;
3685 		}
3686 	    }
3687 
3688 	  /* No match: advance the buffer by one insn.  */
3689 	  peep2_current = peep2_buf_position (peep2_current + 1);
3690 	  peep2_current_count--;
3691 	}
3692     }
3693 
3694   default_rtl_profile ();
3695   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3696     BITMAP_FREE (peep2_insn_data[i].live_before);
3697   BITMAP_FREE (live);
3698   if (peep2_do_rebuild_jump_labels)
3699     rebuild_jump_labels (get_insns ());
3700   if (peep2_do_cleanup_cfg)
3701     cleanup_cfg (CLEANUP_CFG_CHANGED);
3702 }
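
/* For reference, the patterns matched above come from define_peephole2
   constructs in the target's machine description.  A hypothetical pattern
   (illustrative only, not taken from any backend) that merges a register
   copy with a following add might look like:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
	     (match_operand:SI 1 "register_operand" ""))
	(set (match_dup 0)
	     (plus:SI (match_dup 0)
		      (match_operand:SI 2 "immediate_operand" "")))]
       ""
       [(set (match_dup 0)
	     (plus:SI (match_dup 1) (match_dup 2)))])

   The gen* tools compile all such patterns into the peephole2_insns
   routine that the driver above invokes on each buffer head.  */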
3703 
3704 /* Common predicates for use with define_bypass.  */
3705 
3706 /* Helper function for store_data_bypass_p, handle just a single SET
3707    IN_SET.  */
3708 
3709 static bool
3710 store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
3711 {
3712   if (!MEM_P (SET_DEST (in_set)))
3713     return false;
3714 
3715   rtx out_set = single_set (out_insn);
3716   if (out_set)
3717     return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
3718 
3719   rtx out_pat = PATTERN (out_insn);
3720   if (GET_CODE (out_pat) != PARALLEL)
3721     return false;
3722 
3723   for (int i = 0; i < XVECLEN (out_pat, 0); i++)
3724     {
3725       rtx out_exp = XVECEXP (out_pat, 0, i);
3726 
3727       if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE
3728 	  || GET_CODE (out_exp) == CLOBBER_HIGH)
3729 	continue;
3730 
3731       gcc_assert (GET_CODE (out_exp) == SET);
3732 
3733       if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3734 	return false;
3735     }
3736 
3737   return true;
3738 }
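
/* For example (illustrative RTL, not from any particular target): with
   OUT_INSN
     (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))
   and IN_SET
     (set (mem:SI (reg:SI 3)) (reg:SI 0))
   the function above returns true, since register 0 feeds only the
   stored data.  Were IN_SET instead
     (set (mem:SI (reg:SI 0)) (reg:SI 4))
   it would return false, because register 0 is mentioned in the address
   of the store destination.  */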
3739 
3740 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3741    data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
3742    must be either a single_set or a PARALLEL with SETs inside.  */
3743 
3744 int
3745 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3746 {
3747   rtx in_set = single_set (in_insn);
3748   if (in_set)
3749     return store_data_bypass_p_1 (out_insn, in_set);
3750 
3751   rtx in_pat = PATTERN (in_insn);
3752   if (GET_CODE (in_pat) != PARALLEL)
3753     return false;
3754 
3755   for (int i = 0; i < XVECLEN (in_pat, 0); i++)
3756     {
3757       rtx in_exp = XVECEXP (in_pat, 0, i);
3758 
3759       if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE
3760 	  || GET_CODE (in_exp) == CLOBBER_HIGH)
3761 	continue;
3762 
3763       gcc_assert (GET_CODE (in_exp) == SET);
3764 
3765       if (!store_data_bypass_p_1 (out_insn, in_exp))
3766 	return false;
3767     }
3768 
3769   return true;
3770 }
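
/* Targets typically name this predicate as the guard of a define_bypass
   in their pipeline description.  An illustrative fragment, assuming the
   target defines "alu" and "store" insn reservations:

     (define_bypass 1 "alu" "store" "store_data_bypass_p")

   This grants the shorter latency only when the store depends on the ALU
   result for its data rather than for its address.  */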
3771 
3772 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3773    condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
3774    or multiple set; IN_INSN should be a single_set for a meaningful result,
3775    but for convenience of insn categorization it may be any JUMP or CALL insn.  */
3776 
3777 int
3778 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3779 {
3780   rtx out_set, in_set;
3781 
3782   in_set = single_set (in_insn);
3783   if (! in_set)
3784     {
3785       gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3786       return false;
3787     }
3788 
3789   if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3790     return false;
3791   in_set = SET_SRC (in_set);
3792 
3793   out_set = single_set (out_insn);
3794   if (out_set)
3795     {
3796       if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3797 	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3798 	return false;
3799     }
3800   else
3801     {
3802       rtx out_pat;
3803       int i;
3804 
3805       out_pat = PATTERN (out_insn);
3806       gcc_assert (GET_CODE (out_pat) == PARALLEL);
3807 
3808       for (i = 0; i < XVECLEN (out_pat, 0); i++)
3809 	{
3810 	  rtx exp = XVECEXP (out_pat, 0, i);
3811 
3812 	  if (GET_CODE (exp) == CLOBBER || GET_CODE (exp) == CLOBBER_HIGH)
3813 	    continue;
3814 
3815 	  gcc_assert (GET_CODE (exp) == SET);
3816 
3817 	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3818 	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3819 	    return false;
3820 	}
3821     }
3822 
3823   return true;
3824 }
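
/* For example (illustrative RTL): with OUT_INSN
     (set (reg:CC 17) (compare:CC (reg:SI 0) (const_int 0)))
   and IN_INSN
     (set (reg:SI 1)
	  (if_then_else:SI (eq (reg:CC 17) (const_int 0))
			   (reg:SI 2)
			   (reg:SI 3)))
   the function above returns true: register 17 feeds only the
   IF_THEN_ELSE condition.  It would return false if register 17 were
   also mentioned in either arm.  */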
3825 
3826 static unsigned int
3827 rest_of_handle_peephole2 (void)
3828 {
3829   if (HAVE_peephole2)
3830     peephole2_optimize ();
3831 
3832   return 0;
3833 }
3834 
3835 namespace {
3836 
3837 const pass_data pass_data_peephole2 =
3838 {
3839   RTL_PASS, /* type */
3840   "peephole2", /* name */
3841   OPTGROUP_NONE, /* optinfo_flags */
3842   TV_PEEPHOLE2, /* tv_id */
3843   0, /* properties_required */
3844   0, /* properties_provided */
3845   0, /* properties_destroyed */
3846   0, /* todo_flags_start */
3847   TODO_df_finish, /* todo_flags_finish */
3848 };
3849 
3850 class pass_peephole2 : public rtl_opt_pass
3851 {
3852 public:
3853   pass_peephole2 (gcc::context *ctxt)
3854     : rtl_opt_pass (pass_data_peephole2, ctxt)
3855   {}
3856 
3857   /* opt_pass methods: */
3858   /* The epiphany backend creates a second instance of this pass, so we need
3859      a clone method.  */
3860   opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3861   virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3862   virtual unsigned int execute (function *)
3863     {
3864       return rest_of_handle_peephole2 ();
3865     }
3866 
3867 }; // class pass_peephole2
3868 
3869 } // anon namespace
3870 
3871 rtl_opt_pass *
3872 make_pass_peephole2 (gcc::context *ctxt)
3873 {
3874   return new pass_peephole2 (ctxt);
3875 }
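
/* This factory, like the make_pass_* functions below, is referenced from
   passes.def, where the pass is inserted into the post-reload pipeline
   via NEXT_PASS; only these entry points are exported from this file.  */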
3876 
3877 namespace {
3878 
3879 const pass_data pass_data_split_all_insns =
3880 {
3881   RTL_PASS, /* type */
3882   "split1", /* name */
3883   OPTGROUP_NONE, /* optinfo_flags */
3884   TV_NONE, /* tv_id */
3885   0, /* properties_required */
3886   PROP_rtl_split_insns, /* properties_provided */
3887   0, /* properties_destroyed */
3888   0, /* todo_flags_start */
3889   0, /* todo_flags_finish */
3890 };
3891 
3892 class pass_split_all_insns : public rtl_opt_pass
3893 {
3894 public:
3895   pass_split_all_insns (gcc::context *ctxt)
3896     : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3897   {}
3898 
3899   /* opt_pass methods: */
3900   /* The epiphany backend creates a second instance of this pass, so
3901      we need a clone method.  */
3902   opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3903   virtual unsigned int execute (function *)
3904     {
3905       split_all_insns ();
3906       return 0;
3907     }
3908 
3909 }; // class pass_split_all_insns
3910 
3911 } // anon namespace
3912 
3913 rtl_opt_pass *
3914 make_pass_split_all_insns (gcc::context *ctxt)
3915 {
3916   return new pass_split_all_insns (ctxt);
3917 }
3918 
3919 namespace {
3920 
3921 const pass_data pass_data_split_after_reload =
3922 {
3923   RTL_PASS, /* type */
3924   "split2", /* name */
3925   OPTGROUP_NONE, /* optinfo_flags */
3926   TV_NONE, /* tv_id */
3927   0, /* properties_required */
3928   0, /* properties_provided */
3929   0, /* properties_destroyed */
3930   0, /* todo_flags_start */
3931   0, /* todo_flags_finish */
3932 };
3933 
3934 class pass_split_after_reload : public rtl_opt_pass
3935 {
3936 public:
3937   pass_split_after_reload (gcc::context *ctxt)
3938     : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3939   {}
3940 
3941   /* opt_pass methods: */
3942   virtual bool gate (function *)
3943     {
3944       /* If optimizing, then go ahead and split insns now.  */
3945       if (optimize > 0)
3946 	return true;
3947 
3948 #ifdef STACK_REGS
3949       return true;
3950 #else
3951       return false;
3952 #endif
3953     }
3954 
3955   virtual unsigned int execute (function *)
3956     {
3957       split_all_insns ();
3958       return 0;
3959     }
3960 
3961 }; // class pass_split_after_reload
3962 
3963 } // anon namespace
3964 
3965 rtl_opt_pass *
3966 make_pass_split_after_reload (gcc::context *ctxt)
3967 {
3968   return new pass_split_after_reload (ctxt);
3969 }
3970 
3971 namespace {
3972 
3973 const pass_data pass_data_split_before_regstack =
3974 {
3975   RTL_PASS, /* type */
3976   "split3", /* name */
3977   OPTGROUP_NONE, /* optinfo_flags */
3978   TV_NONE, /* tv_id */
3979   0, /* properties_required */
3980   0, /* properties_provided */
3981   0, /* properties_destroyed */
3982   0, /* todo_flags_start */
3983   0, /* todo_flags_finish */
3984 };
3985 
3986 class pass_split_before_regstack : public rtl_opt_pass
3987 {
3988 public:
3989   pass_split_before_regstack (gcc::context *ctxt)
3990     : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3991   {}
3992 
3993   /* opt_pass methods: */
3994   virtual bool gate (function *);
3995   virtual unsigned int execute (function *)
3996     {
3997       split_all_insns ();
3998       return 0;
3999     }
4000 
4001 }; // class pass_split_before_regstack
4002 
4003 bool
4004 pass_split_before_regstack::gate (function *)
4005 {
4006 #if HAVE_ATTR_length && defined (STACK_REGS)
4007   /* If flow2 creates new instructions which need splitting,
4008      and scheduling after reload is not done, they might not be
4009      split until final, which does not allow splitting
4010      if HAVE_ATTR_length is defined.  */
4011 # ifdef INSN_SCHEDULING
4012   return (optimize && !flag_schedule_insns_after_reload);
4013 # else
4014   return (optimize);
4015 # endif
4016 #else
4017   return false;
4018 #endif
4019 }
4020 
4021 } // anon namespace
4022 
4023 rtl_opt_pass *
4024 make_pass_split_before_regstack (gcc::context *ctxt)
4025 {
4026   return new pass_split_before_regstack (ctxt);
4027 }
4028 
4029 static unsigned int
4030 rest_of_handle_split_before_sched2 (void)
4031 {
4032 #ifdef INSN_SCHEDULING
4033   split_all_insns ();
4034 #endif
4035   return 0;
4036 }
4037 
4038 namespace {
4039 
4040 const pass_data pass_data_split_before_sched2 =
4041 {
4042   RTL_PASS, /* type */
4043   "split4", /* name */
4044   OPTGROUP_NONE, /* optinfo_flags */
4045   TV_NONE, /* tv_id */
4046   0, /* properties_required */
4047   0, /* properties_provided */
4048   0, /* properties_destroyed */
4049   0, /* todo_flags_start */
4050   0, /* todo_flags_finish */
4051 };
4052 
4053 class pass_split_before_sched2 : public rtl_opt_pass
4054 {
4055 public:
4056   pass_split_before_sched2 (gcc::context *ctxt)
4057     : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4058   {}
4059 
4060   /* opt_pass methods: */
4061   virtual bool gate (function *)
4062     {
4063 #ifdef INSN_SCHEDULING
4064       return optimize > 0 && flag_schedule_insns_after_reload;
4065 #else
4066       return false;
4067 #endif
4068     }
4069 
4070   virtual unsigned int execute (function *)
4071     {
4072       return rest_of_handle_split_before_sched2 ();
4073     }
4074 
4075 }; // class pass_split_before_sched2
4076 
4077 } // anon namespace
4078 
4079 rtl_opt_pass *
4080 make_pass_split_before_sched2 (gcc::context *ctxt)
4081 {
4082   return new pass_split_before_sched2 (ctxt);
4083 }
4084 
4085 namespace {
4086 
4087 const pass_data pass_data_split_for_shorten_branches =
4088 {
4089   RTL_PASS, /* type */
4090   "split5", /* name */
4091   OPTGROUP_NONE, /* optinfo_flags */
4092   TV_NONE, /* tv_id */
4093   0, /* properties_required */
4094   0, /* properties_provided */
4095   0, /* properties_destroyed */
4096   0, /* todo_flags_start */
4097   0, /* todo_flags_finish */
4098 };
4099 
4100 class pass_split_for_shorten_branches : public rtl_opt_pass
4101 {
4102 public:
4103   pass_split_for_shorten_branches (gcc::context *ctxt)
4104     : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4105   {}
4106 
4107   /* opt_pass methods: */
4108   virtual bool gate (function *)
4109     {
4110       /* The placement of the splitting that we do for shorten_branches
4111 	 depends on whether regstack is used by the target or not.  */
4112 #if HAVE_ATTR_length && !defined (STACK_REGS)
4113       return true;
4114 #else
4115       return false;
4116 #endif
4117     }
4118 
4119   virtual unsigned int execute (function *)
4120     {
4121       return split_all_insns_noflow ();
4122     }
4123 
4124 }; // class pass_split_for_shorten_branches
4125 
4126 } // anon namespace
4127 
4128 rtl_opt_pass *
4129 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4130 {
4131   return new pass_split_for_shorten_branches (ctxt);
4132 }
4133 
4134 /* (Re)initialize the target information after a change in target.  */
4135 
4136 void
4137 recog_init ()
4138 {
4139   /* The information is zero-initialized, so we don't need to do anything
4140      first time round.  */
4141   if (!this_target_recog->x_initialized)
4142     {
4143       this_target_recog->x_initialized = true;
4144       return;
4145     }
4146   memset (this_target_recog->x_bool_attr_masks, 0,
4147 	  sizeof (this_target_recog->x_bool_attr_masks));
4148   for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4149     if (this_target_recog->x_op_alt[i])
4150       {
4151 	free (this_target_recog->x_op_alt[i]);
4152 	this_target_recog->x_op_alt[i] = 0;
4153       }
4154 }
4155