1 /* Optimize by combining instructions for GNU compiler.
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /* This module is essentially the "combiner" phase of the U. of Arizona
23    Portable Optimizer, but redone to work on our list-structured
24    representation for RTL instead of their string representation.
25 
26    The LOG_LINKS of each insn identify the most recent assignment
27    to each REG used in the insn.  It is a list of previous insns,
28    each of which contains a SET for a REG that is used in this insn
29    and not used or set in between.  LOG_LINKs never cross basic blocks.
   They are set up by create_log_links.
31 
32    We try to combine each pair of insns joined by a logical link.
33    We also try to combine triples of insns A, B and C when
34    C has a link back to B and B has a link back to A.
35 
   There are no LOG_LINKS entries for uses of CC0.  None are needed,
   because the insn that sets CC0 is always immediately before the insn
   that tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn explicitly
   using CC0.
41 
42    We check (with use_crosses_set_p) to avoid combining in such a way
43    as to move a computation to a place where its value would be different.
44 
45    Combination is done by mathematically substituting the previous
46    insn(s) values for the regs they set into the expressions in
47    the later insns that refer to these regs.  If the result is a valid insn
48    for our target machine, according to the machine description,
49    we install it, delete the earlier insns, and update the data flow
50    information (LOG_LINKS and REG_NOTES) for what we did.
51 
52    There are a few exceptions where the dataflow information isn't
53    completely updated (however this is only a local issue since it is
54    regenerated before the next pass that uses it):
55 
56    - reg_live_length is not updated
57    - reg_n_refs is not adjusted in the rare case when a register is
58      no longer required in a computation
59    - there are extremely rare cases (see distribute_notes) when a
60      REG_DEAD note is lost
61    - a LOG_LINKS entry that refers to an insn with multiple SETs may be
62      removed because there is no way to know which register it was
63      linking
64 
65    To simplify substitution, we combine only when the earlier insn(s)
66    consist of only a single assignment.  To simplify updating afterward,
67    we never combine when a subroutine call appears in the middle.
68 
69    Since we do not represent assignments to CC0 explicitly except when that
70    is all an insn does, there is no LOG_LINKS entry in an insn that uses
71    the condition code for the insn that set the condition code.
72    Fortunately, these two insns must be consecutive.
73    Therefore, every JUMP_INSN is taken to have an implicit logical link
74    to the preceding insn.  This is not quite right, since non-jumps can
75    also use the condition code; but in practice such insns would not
76    combine anyway.  */
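
/* As a purely hypothetical illustration (not taken from any particular
   machine description), combining

	(set (reg:SI 100) (const_int 5))			 [I2]
	(set (reg:SI 101) (plus:SI (reg:SI 100) (reg:SI 102)))	 [I3]

   substitutes the value set by I2 into I3 and canonicalizes, giving

	(set (reg:SI 101) (plus:SI (reg:SI 102) (const_int 5)))

   If this matches an insn pattern for the target, I2 is deleted and I3
   is replaced, with LOG_LINKS and REG_NOTES updated; otherwise the
   attempt is undone.  */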
77 
78 #include "config.h"
79 #include "system.h"
80 #include "coretypes.h"
81 #include "tm.h"
82 #include "rtl.h"
83 #include "tree.h"
84 #include "tm_p.h"
85 #include "flags.h"
86 #include "regs.h"
87 #include "hard-reg-set.h"
88 #include "basic-block.h"
89 #include "insn-config.h"
90 #include "function.h"
91 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
92 #include "expr.h"
93 #include "insn-attr.h"
94 #include "recog.h"
95 #include "real.h"
96 #include "toplev.h"
97 #include "target.h"
98 #include "optabs.h"
99 #include "insn-codes.h"
100 #include "rtlhooks-def.h"
101 /* Include output.h for dump_file.  */
102 #include "output.h"
103 #include "params.h"
104 #include "timevar.h"
105 #include "tree-pass.h"
106 #include "df.h"
107 #include "cgraph.h"
108 
109 /* Number of attempts to combine instructions in this function.  */
110 
111 static int combine_attempts;
112 
113 /* Number of attempts that got as far as substitution in this function.  */
114 
115 static int combine_merges;
116 
117 /* Number of instructions combined with added SETs in this function.  */
118 
119 static int combine_extras;
120 
121 /* Number of instructions combined in this function.  */
122 
123 static int combine_successes;
124 
125 /* Totals over entire compilation.  */
126 
127 static int total_attempts, total_merges, total_extras, total_successes;
128 
129 /* combine_instructions may try to replace the right hand side of the
130    second instruction with the value of an associated REG_EQUAL note
131    before throwing it at try_combine.  That is problematic when there
132    is a REG_DEAD note for a register used in the old right hand side
133    and can cause distribute_notes to do wrong things.  This is the
134    second instruction if it has been so modified, null otherwise.  */
135 
136 static rtx i2mod;
137 
138 /* When I2MOD is nonnull, this is a copy of the old right hand side.  */
139 
140 static rtx i2mod_old_rhs;
141 
142 /* When I2MOD is nonnull, this is a copy of the new right hand side.  */
143 
144 static rtx i2mod_new_rhs;
145 
146 typedef struct reg_stat_struct {
147   /* Record last point of death of (hard or pseudo) register n.  */
148   rtx				last_death;
149 
150   /* Record last point of modification of (hard or pseudo) register n.  */
151   rtx				last_set;
152 
153   /* The next group of fields allows the recording of the last value assigned
154      to (hard or pseudo) register n.  We use this information to see if an
155      operation being processed is redundant given a prior operation performed
156      on the register.  For example, an `and' with a constant is redundant if
157      all the zero bits are already known to be turned off.
158 
159      We use an approach similar to that used by cse, but change it in the
160      following ways:
161 
162      (1) We do not want to reinitialize at each label.
163      (2) It is useful, but not critical, to know the actual value assigned
164 	 to a register.  Often just its form is helpful.
165 
166      Therefore, we maintain the following fields:
167 
168      last_set_value		the last value assigned
169      last_set_label		records the value of label_tick when the
170 				register was assigned
171      last_set_table_tick	records the value of label_tick when a
172 				value using the register is assigned
173      last_set_invalid		set to nonzero when it is not valid
174 				to use the value of this register in some
175 				register's value
176 
177      To understand the usage of these tables, it is important to understand
178      the distinction between the value in last_set_value being valid and
179      the register being validly contained in some other expression in the
180      table.
181 
182      (The next two parameters are out of date).
183 
184      reg_stat[i].last_set_value is valid if it is nonzero, and either
185      reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.
186 
187      Register I may validly appear in any expression returned for the value
188      of another register if reg_n_sets[i] is 1.  It may also appear in the
189      value for register J if reg_stat[j].last_set_invalid is zero, or
190      reg_stat[i].last_set_label < reg_stat[j].last_set_label.
191 
192      If an expression is found in the table containing a register which may
193      not validly appear in an expression, the register is replaced by
194      something that won't match, (clobber (const_int 0)).  */
195 
196   /* Record last value assigned to (hard or pseudo) register n.  */
197 
198   rtx				last_set_value;
199 
200   /* Record the value of label_tick when an expression involving register n
201      is placed in last_set_value.  */
202 
203   int				last_set_table_tick;
204 
205   /* Record the value of label_tick when the value for register n is placed in
206      last_set_value.  */
207 
208   int				last_set_label;
209 
210   /* These fields are maintained in parallel with last_set_value and are
211      used to store the mode in which the register was last set, the bits
212      that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */
214 
215   unsigned HOST_WIDE_INT	last_set_nonzero_bits;
216   char				last_set_sign_bit_copies;
217   ENUM_BITFIELD(machine_mode)	last_set_mode : 8;
218 
219   /* Set nonzero if references to register n in expressions should not be
220      used.  last_set_invalid is set nonzero when this register is being
221      assigned to and last_set_table_tick == label_tick.  */
222 
223   char				last_set_invalid;
224 
225   /* Some registers that are set more than once and used in more than one
226      basic block are nevertheless always set in similar ways.  For example,
227      a QImode register may be loaded from memory in two places on a machine
228      where byte loads zero extend.
229 
230      We record in the following fields if a register has some leading bits
231      that are always equal to the sign bit, and what we know about the
232      nonzero bits of a register, specifically which bits are known to be
233      zero.
234 
235      If an entry is zero, it means that we don't know anything special.  */
236 
237   unsigned char			sign_bit_copies;
238 
239   unsigned HOST_WIDE_INT	nonzero_bits;
240 
241   /* Record the value of the label_tick when the last truncation
242      happened.  The field truncated_to_mode is only valid if
243      truncation_label == label_tick.  */
244 
245   int				truncation_label;
246 
247   /* Record the last truncation seen for this register.  If truncation
248      is not a nop to this mode we might be able to save an explicit
249      truncation if we know that value already contains a truncated
250      value.  */
251 
252   ENUM_BITFIELD(machine_mode)	truncated_to_mode : 8;
253 } reg_stat_type;
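
/* As a hypothetical example of how the last two fields are used: on a
   machine where byte loads zero extend, an SImode register that is only
   ever loaded from QImode memory would have nonzero_bits == 0xff, so a
   later (and:SI (reg) (const_int 255)) could be seen to be redundant.  */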
254 
255 DEF_VEC_O(reg_stat_type);
256 DEF_VEC_ALLOC_O(reg_stat_type,heap);
257 
258 static VEC(reg_stat_type,heap) *reg_stat;
259 
260 /* Record the luid of the last insn that invalidated memory
261    (anything that writes memory, and subroutine calls, but not pushes).  */
262 
263 static int mem_last_set;
264 
265 /* Record the luid of the last CALL_INSN
266    so we can tell whether a potential combination crosses any calls.  */
267 
268 static int last_call_luid;
269 
270 /* When `subst' is called, this is the insn that is being modified
271    (by combining in a previous insn).  The PATTERN of this insn
272    is still the old pattern partially modified and it should not be
273    looked at, but this may be used to examine the successors of the insn
274    to judge whether a simplification is valid.  */
275 
276 static rtx subst_insn;
277 
278 /* This is the lowest LUID that `subst' is currently dealing with.
279    get_last_value will not return a value if the register was set at or
280    after this LUID.  If not for this mechanism, we could get confused if
281    I2 or I1 in try_combine were an insn that used the old value of a register
282    to obtain a new value.  In that case, we might erroneously get the
283    new value of the register when we wanted the old one.  */
284 
285 static int subst_low_luid;
286 
287 /* This contains any hard registers that are used in newpat; reg_dead_at_p
288    must consider all these registers to be always live.  */
289 
290 static HARD_REG_SET newpat_used_regs;
291 
292 /* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
294    that location.  */
295 
296 static rtx added_links_insn;
297 
298 /* Basic block in which we are performing combines.  */
299 static basic_block this_basic_block;
300 static bool optimize_this_for_speed_p;
301 
302 
303 /* Length of the currently allocated uid_insn_cost array.  */
304 
305 static int max_uid_known;
306 
307 /* The following array records the insn_rtx_cost for every insn
308    in the instruction stream.  */
309 
310 static int *uid_insn_cost;
311 
312 /* The following array records the LOG_LINKS for every insn in the
313    instruction stream as an INSN_LIST rtx.  */
314 
315 static rtx *uid_log_links;
316 
317 #define INSN_COST(INSN)		(uid_insn_cost[INSN_UID (INSN)])
318 #define LOG_LINKS(INSN)		(uid_log_links[INSN_UID (INSN)])
319 
320 /* Incremented for each basic block.  */
321 
322 static int label_tick;
323 
324 /* Reset to label_tick for each extended basic block in scanning order.  */
325 
326 static int label_tick_ebb_start;
327 
328 /* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
329    largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */
330 
331 static enum machine_mode nonzero_bits_mode;
332 
333 /* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
334    be safely used.  It is zero while computing them and after combine has
   completed.  The former case prevents propagating values based on
336    previously set values, which can be incorrect if a variable is modified
337    in a loop.  */
338 
339 static int nonzero_sign_valid;
340 
341 
342 /* Record one modification to rtl structure
343    to be undone by storing old_contents into *where.  */
344 
345 enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE };
346 
347 struct undo
348 {
349   struct undo *next;
350   enum undo_kind kind;
351   union { rtx r; int i; enum machine_mode m; } old_contents;
352   union { rtx *r; int *i; } where;
353 };
354 
355 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
356    num_undo says how many are currently recorded.
357 
358    other_insn is nonzero if we have modified some other insn in the process
359    of working on subst_insn.  It must be verified too.  */
360 
361 struct undobuf
362 {
363   struct undo *undos;
364   struct undo *frees;
365   rtx other_insn;
366 };
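
/* A minimal sketch of how the undo machinery is used (illustrative only):
   a simplification routine calls SUBST (XEXP (x, 0), new_rtx), which
   records the old contents in a struct undo on undobuf.undos before
   storing the new value.  If the rewritten insn fails to match, undo_all
   restores every *where from old_contents; if it succeeds, undo_commit
   moves the records onto undobuf.frees for reuse.  */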
367 
368 static struct undobuf undobuf;
369 
370 /* Number of times the pseudo being substituted for
371    was found and replaced.  */
372 
373 static int n_occurrences;
374 
375 static rtx reg_nonzero_bits_for_combine (const_rtx, enum machine_mode, const_rtx,
376 					 enum machine_mode,
377 					 unsigned HOST_WIDE_INT,
378 					 unsigned HOST_WIDE_INT *);
379 static rtx reg_num_sign_bit_copies_for_combine (const_rtx, enum machine_mode, const_rtx,
380 						enum machine_mode,
381 						unsigned int, unsigned int *);
382 static void do_SUBST (rtx *, rtx);
383 static void do_SUBST_INT (int *, int);
384 static void init_reg_last (void);
385 static void setup_incoming_promotions (rtx);
386 static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
387 static int cant_combine_insn_p (rtx);
388 static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
389 static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
390 static int contains_muldiv (rtx);
391 static rtx try_combine (rtx, rtx, rtx, int *);
392 static void undo_all (void);
393 static void undo_commit (void);
394 static rtx *find_split_point (rtx *, rtx);
395 static rtx subst (rtx, rtx, rtx, int, int);
396 static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
397 static rtx simplify_if_then_else (rtx);
398 static rtx simplify_set (rtx);
399 static rtx simplify_logical (rtx);
400 static rtx expand_compound_operation (rtx);
401 static const_rtx expand_field_assignment (const_rtx);
402 static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
403 			    rtx, unsigned HOST_WIDE_INT, int, int, int);
404 static rtx extract_left_shift (rtx, int);
405 static rtx make_compound_operation (rtx, enum rtx_code);
406 static int get_pos_from_mask (unsigned HOST_WIDE_INT,
407 			      unsigned HOST_WIDE_INT *);
408 static rtx canon_reg_for_combine (rtx, rtx);
409 static rtx force_to_mode (rtx, enum machine_mode,
410 			  unsigned HOST_WIDE_INT, int);
411 static rtx if_then_else_cond (rtx, rtx *, rtx *);
412 static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
413 static int rtx_equal_for_field_assignment_p (rtx, rtx);
414 static rtx make_field_assignment (rtx);
415 static rtx apply_distributive_law (rtx);
416 static rtx distribute_and_simplify_rtx (rtx, int);
417 static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
418 				     unsigned HOST_WIDE_INT);
419 static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
420 				   unsigned HOST_WIDE_INT);
421 static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
422 			    HOST_WIDE_INT, enum machine_mode, int *);
423 static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
424 static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
425 				 int);
426 static int recog_for_combine (rtx *, rtx, rtx *);
427 static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
428 static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
429 static void update_table_tick (rtx);
430 static void record_value_for_reg (rtx, rtx, rtx);
431 static void check_promoted_subreg (rtx, rtx);
432 static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
433 static void record_dead_and_set_regs (rtx);
434 static int get_last_value_validate (rtx *, rtx, int, int);
435 static rtx get_last_value (const_rtx);
436 static int use_crosses_set_p (const_rtx, int);
437 static void reg_dead_at_p_1 (rtx, const_rtx, void *);
438 static int reg_dead_at_p (rtx, rtx);
439 static void move_deaths (rtx, rtx, int, rtx, rtx *);
440 static int reg_bitfield_target_p (rtx, rtx);
441 static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
442 static void distribute_links (rtx);
443 static void mark_used_regs_combine (rtx);
444 static void record_promoted_value (rtx, rtx);
445 static int unmentioned_reg_p_1 (rtx *, void *);
446 static bool unmentioned_reg_p (rtx, rtx);
447 static int record_truncated_value (rtx *, void *);
448 static void record_truncated_values (rtx *, void *);
449 static bool reg_truncated_to_mode (enum machine_mode, const_rtx);
450 static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);
451 
452 
453 /* It is not safe to use ordinary gen_lowpart in combine.
454    See comments in gen_lowpart_for_combine.  */
455 #undef RTL_HOOKS_GEN_LOWPART
456 #define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine
457 
458 /* Our implementation of gen_lowpart never emits a new pseudo.  */
459 #undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
460 #define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine
461 
462 #undef RTL_HOOKS_REG_NONZERO_REG_BITS
463 #define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine
464 
465 #undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
466 #define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine
467 
468 #undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
469 #define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode
470 
471 static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
472 
473 
474 /* Try to split PATTERN found in INSN.  This returns NULL_RTX if
   PATTERN cannot be split.  Otherwise, it returns an insn sequence.
476    This is a wrapper around split_insns which ensures that the
477    reg_stat vector is made larger if the splitter creates a new
478    register.  */
479 
480 static rtx
481 combine_split_insns (rtx pattern, rtx insn)
482 {
483   rtx ret;
484   unsigned int nregs;
485 
486   ret = split_insns (pattern, insn);
487   nregs = max_reg_num ();
488   if (nregs > VEC_length (reg_stat_type, reg_stat))
489     VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
490   return ret;
491 }
492 
493 /* This is used by find_single_use to locate an rtx in LOC that
494    contains exactly one use of DEST, which is typically either a REG
495    or CC0.  It returns a pointer to the innermost rtx expression
496    containing DEST.  Appearances of DEST that are being used to
497    totally replace it are not counted.  */
498 
499 static rtx *
500 find_single_use_1 (rtx dest, rtx *loc)
501 {
502   rtx x = *loc;
503   enum rtx_code code = GET_CODE (x);
504   rtx *result = NULL;
505   rtx *this_result;
506   int i;
507   const char *fmt;
508 
509   switch (code)
510     {
511     case CONST_INT:
512     case CONST:
513     case LABEL_REF:
514     case SYMBOL_REF:
515     case CONST_DOUBLE:
516     case CONST_VECTOR:
517     case CLOBBER:
518       return 0;
519 
520     case SET:
521       /* If the destination is anything other than CC0, PC, a REG or a SUBREG
522 	 of a REG that occupies all of the REG, the insn uses DEST if
523 	 it is mentioned in the destination or the source.  Otherwise, we
	 need only check the source.  */
525       if (GET_CODE (SET_DEST (x)) != CC0
526 	  && GET_CODE (SET_DEST (x)) != PC
527 	  && !REG_P (SET_DEST (x))
528 	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
529 		&& REG_P (SUBREG_REG (SET_DEST (x)))
530 		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
531 		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
532 		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
533 			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
534 	break;
535 
536       return find_single_use_1 (dest, &SET_SRC (x));
537 
538     case MEM:
539     case SUBREG:
540       return find_single_use_1 (dest, &XEXP (x, 0));
541 
542     default:
543       break;
544     }
545 
546   /* If it wasn't one of the common cases above, check each expression and
547      vector of this code.  Look for a unique usage of DEST.  */
548 
549   fmt = GET_RTX_FORMAT (code);
550   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
551     {
552       if (fmt[i] == 'e')
553 	{
554 	  if (dest == XEXP (x, i)
555 	      || (REG_P (dest) && REG_P (XEXP (x, i))
556 		  && REGNO (dest) == REGNO (XEXP (x, i))))
557 	    this_result = loc;
558 	  else
559 	    this_result = find_single_use_1 (dest, &XEXP (x, i));
560 
561 	  if (result == NULL)
562 	    result = this_result;
563 	  else if (this_result)
564 	    /* Duplicate usage.  */
565 	    return NULL;
566 	}
567       else if (fmt[i] == 'E')
568 	{
569 	  int j;
570 
571 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
572 	    {
573 	      if (XVECEXP (x, i, j) == dest
574 		  || (REG_P (dest)
575 		      && REG_P (XVECEXP (x, i, j))
576 		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
577 		this_result = loc;
578 	      else
579 		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
580 
581 	      if (result == NULL)
582 		result = this_result;
583 	      else if (this_result)
584 		return NULL;
585 	    }
586 	}
587     }
588 
589   return result;
590 }
591 
592 
593 /* See if DEST, produced in INSN, is used only a single time in the
594    sequel.  If so, return a pointer to the innermost rtx expression in which
595    it is used.
596 
597    If PLOC is nonzero, *PLOC is set to the insn containing the single use.
598 
599    If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
600    care about REG_DEAD notes or LOG_LINKS.
601 
602    Otherwise, we find the single use by finding an insn that has a
603    LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
604    only referenced once in that insn, we know that it must be the first
605    and last insn referencing DEST.  */
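
/* For example (hypothetical): if INSN sets (reg:SI 100) and the only
   later reference is in (set (reg:SI 101) (neg:SI (reg:SI 100))), the
   value returned by find_single_use points at the NEG expression and
   *PLOC is set to that insn.  */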
606 
607 static rtx *
608 find_single_use (rtx dest, rtx insn, rtx *ploc)
609 {
610   basic_block bb;
611   rtx next;
612   rtx *result;
613   rtx link;
614 
615 #ifdef HAVE_cc0
616   if (dest == cc0_rtx)
617     {
618       next = NEXT_INSN (insn);
619       if (next == 0
620 	  || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
621 	return 0;
622 
623       result = find_single_use_1 (dest, &PATTERN (next));
624       if (result && ploc)
625 	*ploc = next;
626       return result;
627     }
628 #endif
629 
630   if (!REG_P (dest))
631     return 0;
632 
633   bb = BLOCK_FOR_INSN (insn);
634   for (next = NEXT_INSN (insn);
635        next && BLOCK_FOR_INSN (next) == bb;
636        next = NEXT_INSN (next))
637     if (INSN_P (next) && dead_or_set_p (next, dest))
638       {
639 	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
640 	  if (XEXP (link, 0) == insn)
641 	    break;
642 
643 	if (link)
644 	  {
645 	    result = find_single_use_1 (dest, &PATTERN (next));
646 	    if (ploc)
647 	      *ploc = next;
648 	    return result;
649 	  }
650       }
651 
652   return 0;
653 }
654 
655 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
656    insn.  The substitution can be undone by undo_all.  If INTO is already
657    set to NEWVAL, do not record this change.  Because computing NEWVAL might
658    also call SUBST, we have to compute it before we put anything into
659    the undo table.  */
660 
661 static void
662 do_SUBST (rtx *into, rtx newval)
663 {
664   struct undo *buf;
665   rtx oldval = *into;
666 
667   if (oldval == newval)
668     return;
669 
670   /* We'd like to catch as many invalid transformations here as
671      possible.  Unfortunately, there are way too many mode changes
672      that are perfectly valid, so we'd waste too much effort for
673      little gain doing the checks here.  Focus on catching invalid
674      transformations involving integer constants.  */
675   if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
676       && CONST_INT_P (newval))
677     {
678       /* Sanity check that we're replacing oldval with a CONST_INT
679 	 that is a valid sign-extension for the original mode.  */
680       gcc_assert (INTVAL (newval)
681 		  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));
682 
683       /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
684 	 CONST_INT is not valid, because after the replacement, the
685 	 original mode would be gone.  Unfortunately, we can't tell
686 	 when do_SUBST is called to replace the operand thereof, so we
687 	 perform this test on oldval instead, checking whether an
688 	 invalid replacement took place before we got here.  */
689       gcc_assert (!(GET_CODE (oldval) == SUBREG
690 		    && CONST_INT_P (SUBREG_REG (oldval))));
691       gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
692 		    && CONST_INT_P (XEXP (oldval, 0))));
693     }
694 
695   if (undobuf.frees)
696     buf = undobuf.frees, undobuf.frees = buf->next;
697   else
698     buf = XNEW (struct undo);
699 
700   buf->kind = UNDO_RTX;
701   buf->where.r = into;
702   buf->old_contents.r = oldval;
703   *into = newval;
704 
705   buf->next = undobuf.undos, undobuf.undos = buf;
706 }
707 
708 #define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
709 
710 /* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT (including a CONST_INT) is not
   safe, since an int may be narrower than a HOST_WIDE_INT.  */
713 
714 static void
715 do_SUBST_INT (int *into, int newval)
716 {
717   struct undo *buf;
718   int oldval = *into;
719 
720   if (oldval == newval)
721     return;
722 
723   if (undobuf.frees)
724     buf = undobuf.frees, undobuf.frees = buf->next;
725   else
726     buf = XNEW (struct undo);
727 
728   buf->kind = UNDO_INT;
729   buf->where.i = into;
730   buf->old_contents.i = oldval;
731   *into = newval;
732 
733   buf->next = undobuf.undos, undobuf.undos = buf;
734 }
735 
736 #define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
737 
738 /* Similar to SUBST, but just substitute the mode.  This is used when
739    changing the mode of a pseudo-register, so that any other
740    references to the entry in the regno_reg_rtx array will change as
741    well.  */
742 
743 static void
744 do_SUBST_MODE (rtx *into, enum machine_mode newval)
745 {
746   struct undo *buf;
747   enum machine_mode oldval = GET_MODE (*into);
748 
749   if (oldval == newval)
750     return;
751 
752   if (undobuf.frees)
753     buf = undobuf.frees, undobuf.frees = buf->next;
754   else
755     buf = XNEW (struct undo);
756 
757   buf->kind = UNDO_MODE;
758   buf->where.r = into;
759   buf->old_contents.m = oldval;
760   adjust_reg_mode (*into, newval);
761 
762   buf->next = undobuf.undos, undobuf.undos = buf;
763 }
764 
765 #define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE(&(INTO), (NEWVAL))
766 
767 /* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to
   insn_rtx_cost than the original instruction sequence I1, I2, I3 and
   undobuf.other_insn.  Note that I1 and/or NEWI2PAT may be NULL_RTX.
   NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX.  This
   function returns false if the costs of all instructions can be
   estimated and the replacements are more expensive than the original
   sequence.  */
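
/* For instance (with made-up numbers): if I2 and I3 each have cost 4 and
   the combination produces a single replacement insn of cost 12, then
   old_cost is 8 and new_cost is 12, so the combination is rejected.  A
   cost of zero means "unknown" and disables the comparison.  */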
775 
776 static bool
777 combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat,
778 		       rtx newotherpat)
779 {
780   int i1_cost, i2_cost, i3_cost;
781   int new_i2_cost, new_i3_cost;
782   int old_cost, new_cost;
783 
  /* Look up the original insn_rtx_costs.  */
785   i2_cost = INSN_COST (i2);
786   i3_cost = INSN_COST (i3);
787 
788   if (i1)
789     {
790       i1_cost = INSN_COST (i1);
791       old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
792 		 ? i1_cost + i2_cost + i3_cost : 0;
793     }
794   else
795     {
796       old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
797       i1_cost = 0;
798     }
799 
800   /* Calculate the replacement insn_rtx_costs.  */
801   new_i3_cost = insn_rtx_cost (newpat, optimize_this_for_speed_p);
802   if (newi2pat)
803     {
804       new_i2_cost = insn_rtx_cost (newi2pat, optimize_this_for_speed_p);
805       new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
806 		 ? new_i2_cost + new_i3_cost : 0;
807     }
808   else
809     {
810       new_cost = new_i3_cost;
811       new_i2_cost = 0;
812     }
813 
814   if (undobuf.other_insn)
815     {
816       int old_other_cost, new_other_cost;
817 
818       old_other_cost = INSN_COST (undobuf.other_insn);
819       new_other_cost = insn_rtx_cost (newotherpat, optimize_this_for_speed_p);
820       if (old_other_cost > 0 && new_other_cost > 0)
821 	{
822 	  old_cost += old_other_cost;
823 	  new_cost += new_other_cost;
824 	}
825       else
826 	old_cost = 0;
827     }
828 
  /* Disallow this combination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old_cost.  */
831   if (old_cost > 0
832       && new_cost > old_cost)
833     {
834       if (dump_file)
835 	{
836 	  if (i1)
837 	    {
838 	      fprintf (dump_file,
839 		       "rejecting combination of insns %d, %d and %d\n",
840 		       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
841 	      fprintf (dump_file, "original costs %d + %d + %d = %d\n",
842 		       i1_cost, i2_cost, i3_cost, old_cost);
843 	    }
844 	  else
845 	    {
846 	      fprintf (dump_file,
847 		       "rejecting combination of insns %d and %d\n",
848 		       INSN_UID (i2), INSN_UID (i3));
849 	      fprintf (dump_file, "original costs %d + %d = %d\n",
850 		       i2_cost, i3_cost, old_cost);
851 	    }
852 
853 	  if (newi2pat)
854 	    {
855 	      fprintf (dump_file, "replacement costs %d + %d = %d\n",
856 		       new_i2_cost, new_i3_cost, new_cost);
857 	    }
858 	  else
859 	    fprintf (dump_file, "replacement cost %d\n", new_cost);
860 	}
861 
862       return false;
863     }
864 
865   /* Update the uid_insn_cost array with the replacement costs.  */
866   INSN_COST (i2) = new_i2_cost;
867   INSN_COST (i3) = new_i3_cost;
868   if (i1)
869     INSN_COST (i1) = 0;
870 
871   return true;
872 }
873 
874 
875 /* Delete any insns that copy a register to itself.  */
876 
877 static void
878 delete_noop_moves (void)
879 {
880   rtx insn, next;
881   basic_block bb;
882 
883   FOR_EACH_BB (bb)
884     {
885       for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
886 	{
887 	  next = NEXT_INSN (insn);
888 	  if (INSN_P (insn) && noop_move_p (insn))
889 	    {
890 	      if (dump_file)
891 		fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));
892 
893 	      delete_insn_and_edges (insn);
894 	    }
895 	}
896     }
897 }
898 
899 
900 /* Fill in log links field for all insns.  */
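
/* For example (hypothetical): if insn 10 sets (reg:SI 100) and insn 14,
   later in the same basic block, is the next insn to use it, the reverse
   scan below records insn 14 in next_use[100] and then, upon reaching
   the def in insn 10, adds insn 10 to LOG_LINKS (insn 14).  */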
901 
902 static void
903 create_log_links (void)
904 {
905   basic_block bb;
906   rtx *next_use, insn;
907   df_ref *def_vec, *use_vec;
908 
909   next_use = XCNEWVEC (rtx, max_reg_num ());
910 
911   /* Pass through each block from the end, recording the uses of each
     register and establishing log links when a def is encountered.
     Note that we do not clear the next_use array, in order to save time,
914      so we have to test whether the use is in the same basic block as def.
915 
916      There are a few cases below when we do not consider the definition or
917      usage -- these are taken from original flow.c did. Don't ask me why it is
918      done this way; I don't know and if it works, I don't want to know.  */
919 
920   FOR_EACH_BB (bb)
921     {
922       FOR_BB_INSNS_REVERSE (bb, insn)
923         {
924           if (!NONDEBUG_INSN_P (insn))
925             continue;
926 
927 	  /* Log links are created only once.  */
928 	  gcc_assert (!LOG_LINKS (insn));
929 
930           for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
931             {
932 	      df_ref def = *def_vec;
933               int regno = DF_REF_REGNO (def);
934               rtx use_insn;
935 
936               if (!next_use[regno])
937                 continue;
938 
              /* Do not consider a def that is part of a pre/post
                 modification of a MEM address.  */
940               if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
941                 continue;
942 
              /* Do not make a log link for the frame pointer.  */
944               if ((regno == FRAME_POINTER_REGNUM
945                    && (! reload_completed || frame_pointer_needed))
946 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
947                   || (regno == HARD_FRAME_POINTER_REGNUM
948                       && (! reload_completed || frame_pointer_needed))
949 #endif
950 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
951                   || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
952 #endif
953                   )
954                 continue;
955 
956               use_insn = next_use[regno];
957               if (BLOCK_FOR_INSN (use_insn) == bb)
958                 {
959                   /* flow.c claimed:
960 
961                      We don't build a LOG_LINK for hard registers contained
962                      in ASM_OPERANDs.  If these registers get replaced,
963                      we might wind up changing the semantics of the insn,
964                      even if reload can make what appear to be valid
965                      assignments later.  */
966                   if (regno >= FIRST_PSEUDO_REGISTER
967                       || asm_noperands (PATTERN (use_insn)) < 0)
968 		    {
969 		      /* Don't add duplicate links between instructions.  */
970 		      rtx links;
971 		      for (links = LOG_LINKS (use_insn); links;
972 			   links = XEXP (links, 1))
973 		        if (insn == XEXP (links, 0))
974 			  break;
975 
976 		      if (!links)
977 			LOG_LINKS (use_insn) =
978 			  alloc_INSN_LIST (insn, LOG_LINKS (use_insn));
979 		    }
980                 }
981               next_use[regno] = NULL_RTX;
982             }
983 
984           for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
985             {
986 	      df_ref use = *use_vec;
987 	      int regno = DF_REF_REGNO (use);
988 
              /* Do not consider a use of the stack pointer
		 by a function call.  */
991               if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
992                 continue;
993 
994               next_use[regno] = insn;
995             }
996         }
997     }
998 
999   free (next_use);
1000 }
1001 
1002 /* Clear LOG_LINKS fields of insns.  */
1003 
1004 static void
1005 clear_log_links (void)
1006 {
1007   rtx insn;
1008 
1009   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1010     if (INSN_P (insn))
1011       free_INSN_LIST_list (&LOG_LINKS (insn));
1012 }
1013 
1014 /* Main entry point for combiner.  F is the first insn of the function.
1015    NREGS is the first unused pseudo-reg number.
1016 
1017    Return nonzero if the combiner has turned an indirect jump
1018    instruction into a direct jump.  */
1019 static int
1020 combine_instructions (rtx f, unsigned int nregs)
1021 {
1022   rtx insn, next;
1023 #ifdef HAVE_cc0
1024   rtx prev;
1025 #endif
1026   rtx links, nextlinks;
1027   rtx first;
1028   basic_block last_bb;
1029 
1030   int new_direct_jump_p = 0;
1031 
1032   for (first = f; first && !INSN_P (first); )
1033     first = NEXT_INSN (first);
1034   if (!first)
1035     return 0;
1036 
1037   combine_attempts = 0;
1038   combine_merges = 0;
1039   combine_extras = 0;
1040   combine_successes = 0;
1041 
1042   rtl_hooks = combine_rtl_hooks;
1043 
1044   VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
1045 
1046   init_recog_no_volatile ();
1047 
1048   /* Allocate array for insn info.  */
1049   max_uid_known = get_max_uid ();
1050   uid_log_links = XCNEWVEC (rtx, max_uid_known + 1);
1051   uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);
1052 
1053   nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1054 
1055   /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
1056      problems when, for example, we have j <<= 1 in a loop.  */
1057 
1058   nonzero_sign_valid = 0;
1059   label_tick = label_tick_ebb_start = 1;
1060 
1061   /* Scan all SETs and see if we can deduce anything about what
1062      bits are known to be zero for some registers and how many copies
1063      of the sign bit are known to exist for those registers.
1064 
     Also set any known values so that we can use them while searching
1066      for what bits are known to be set.  */
1067 
1068   setup_incoming_promotions (first);
1069   /* Allow the entry block and the first block to fall into the same EBB.
1070      Conceptually the incoming promotions are assigned to the entry block.  */
1071   last_bb = ENTRY_BLOCK_PTR;
1072 
1073   create_log_links ();
1074   FOR_EACH_BB (this_basic_block)
1075     {
1076       optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
1077       last_call_luid = 0;
1078       mem_last_set = -1;
1079 
1080       label_tick++;
1081       if (!single_pred_p (this_basic_block)
1082 	  || single_pred (this_basic_block) != last_bb)
1083 	label_tick_ebb_start = label_tick;
1084       last_bb = this_basic_block;
1085 
1086       FOR_BB_INSNS (this_basic_block, insn)
1087         if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
1088 	  {
1089             subst_low_luid = DF_INSN_LUID (insn);
1090             subst_insn = insn;
1091 
1092 	    note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
1093 		         insn);
1094 	    record_dead_and_set_regs (insn);
1095 
1096 #ifdef AUTO_INC_DEC
1097 	    for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
1098 	      if (REG_NOTE_KIND (links) == REG_INC)
1099 	        set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
1100 						  insn);
1101 #endif
1102 
1103 	    /* Record the current insn_rtx_cost of this instruction.  */
1104 	    if (NONJUMP_INSN_P (insn))
1105 	      INSN_COST (insn) = insn_rtx_cost (PATTERN (insn),
1106 	      					optimize_this_for_speed_p);
1107 	    if (dump_file)
	      fprintf (dump_file, "insn_cost %d: %d\n",
		       INSN_UID (insn), INSN_COST (insn));
1110 	  }
1111     }
1112 
1113   nonzero_sign_valid = 1;
1114 
1115   /* Now scan all the insns in forward order.  */
1116   label_tick = label_tick_ebb_start = 1;
1117   init_reg_last ();
1118   setup_incoming_promotions (first);
1119   last_bb = ENTRY_BLOCK_PTR;
1120 
1121   FOR_EACH_BB (this_basic_block)
1122     {
1123       optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
1124       last_call_luid = 0;
1125       mem_last_set = -1;
1126 
1127       label_tick++;
1128       if (!single_pred_p (this_basic_block)
1129 	  || single_pred (this_basic_block) != last_bb)
1130 	label_tick_ebb_start = label_tick;
1131       last_bb = this_basic_block;
1132 
1133       rtl_profile_for_bb (this_basic_block);
1134       for (insn = BB_HEAD (this_basic_block);
1135 	   insn != NEXT_INSN (BB_END (this_basic_block));
1136 	   insn = next ? next : NEXT_INSN (insn))
1137 	{
1138 	  next = 0;
1139 	  if (NONDEBUG_INSN_P (insn))
1140 	    {
1141 	      /* See if we know about function return values before this
1142 		 insn based upon SUBREG flags.  */
1143 	      check_promoted_subreg (insn, PATTERN (insn));
1144 
	      /* See if we can find hard regs and subregs of pseudos in
		 narrower modes.  This could help turn TRUNCATEs
		 into SUBREGs.  */
1148 	      note_uses (&PATTERN (insn), record_truncated_values, NULL);
1149 
1150 	      /* Try this insn with each insn it links back to.  */
1151 
1152 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
1153 		if ((next = try_combine (insn, XEXP (links, 0),
1154 					 NULL_RTX, &new_direct_jump_p)) != 0)
1155 		  goto retry;
1156 
1157 	      /* Try each sequence of three linked insns ending with this one.  */
1158 
1159 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
1160 		{
1161 		  rtx link = XEXP (links, 0);
1162 
1163 		  /* If the linked insn has been replaced by a note, then there
1164 		     is no point in pursuing this chain any further.  */
1165 		  if (NOTE_P (link))
1166 		    continue;
1167 
1168 		  for (nextlinks = LOG_LINKS (link);
1169 		       nextlinks;
1170 		       nextlinks = XEXP (nextlinks, 1))
1171 		    if ((next = try_combine (insn, link,
1172 					     XEXP (nextlinks, 0),
1173 					     &new_direct_jump_p)) != 0)
1174 		      goto retry;
1175 		}
1176 
1177 #ifdef HAVE_cc0
1178 	      /* Try to combine a jump insn that uses CC0
1179 		 with a preceding insn that sets CC0, and maybe with its
1180 		 logical predecessor as well.
1181 		 This is how we make decrement-and-branch insns.
1182 		 We need this special code because data flow connections
1183 		 via CC0 do not get entered in LOG_LINKS.  */
1184 
1185 	      if (JUMP_P (insn)
1186 		  && (prev = prev_nonnote_insn (insn)) != 0
1187 		  && NONJUMP_INSN_P (prev)
1188 		  && sets_cc0_p (PATTERN (prev)))
1189 		{
1190 		  if ((next = try_combine (insn, prev,
1191 					   NULL_RTX, &new_direct_jump_p)) != 0)
1192 		    goto retry;
1193 
1194 		  for (nextlinks = LOG_LINKS (prev); nextlinks;
1195 		       nextlinks = XEXP (nextlinks, 1))
1196 		    if ((next = try_combine (insn, prev,
1197 					     XEXP (nextlinks, 0),
1198 					     &new_direct_jump_p)) != 0)
1199 		      goto retry;
1200 		}
1201 
1202 	      /* Do the same for an insn that explicitly references CC0.  */
1203 	      if (NONJUMP_INSN_P (insn)
1204 		  && (prev = prev_nonnote_insn (insn)) != 0
1205 		  && NONJUMP_INSN_P (prev)
1206 		  && sets_cc0_p (PATTERN (prev))
1207 		  && GET_CODE (PATTERN (insn)) == SET
1208 		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
1209 		{
1210 		  if ((next = try_combine (insn, prev,
1211 					   NULL_RTX, &new_direct_jump_p)) != 0)
1212 		    goto retry;
1213 
1214 		  for (nextlinks = LOG_LINKS (prev); nextlinks;
1215 		       nextlinks = XEXP (nextlinks, 1))
1216 		    if ((next = try_combine (insn, prev,
1217 					     XEXP (nextlinks, 0),
1218 					     &new_direct_jump_p)) != 0)
1219 		      goto retry;
1220 		}
1221 
1222 	      /* Finally, see if any of the insns that this insn links to
1223 		 explicitly references CC0.  If so, try this insn, that insn,
1224 		 and its predecessor if it sets CC0.  */
1225 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
1226 		if (NONJUMP_INSN_P (XEXP (links, 0))
1227 		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
1228 		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
1229 		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
1230 		    && NONJUMP_INSN_P (prev)
1231 		    && sets_cc0_p (PATTERN (prev))
1232 		    && (next = try_combine (insn, XEXP (links, 0),
1233 					    prev, &new_direct_jump_p)) != 0)
1234 		  goto retry;
1235 #endif
1236 
1237 	      /* Try combining an insn with two different insns whose results it
1238 		 uses.  */
1239 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
1240 		for (nextlinks = XEXP (links, 1); nextlinks;
1241 		     nextlinks = XEXP (nextlinks, 1))
1242 		  if ((next = try_combine (insn, XEXP (links, 0),
1243 					   XEXP (nextlinks, 0),
1244 					   &new_direct_jump_p)) != 0)
1245 		    goto retry;
1246 
1247 	      /* Try this insn with each REG_EQUAL note it links back to.  */
1248 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
1249 		{
1250 		  rtx set, note;
1251 		  rtx temp = XEXP (links, 0);
1252 		  if ((set = single_set (temp)) != 0
1253 		      && (note = find_reg_equal_equiv_note (temp)) != 0
1254 		      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
		      /* Avoid using a register that may already have been
			 marked dead by an earlier instruction.  */
1257 		      && ! unmentioned_reg_p (note, SET_SRC (set))
1258 		      && (GET_MODE (note) == VOIDmode
1259 			  ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
1260 			  : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
1261 		    {
1262 		      /* Temporarily replace the set's source with the
1263 			 contents of the REG_EQUAL note.  The insn will
1264 			 be deleted or recognized by try_combine.  */
1265 		      rtx orig = SET_SRC (set);
1266 		      SET_SRC (set) = note;
1267 		      i2mod = temp;
1268 		      i2mod_old_rhs = copy_rtx (orig);
1269 		      i2mod_new_rhs = copy_rtx (note);
1270 		      next = try_combine (insn, i2mod, NULL_RTX,
1271 					  &new_direct_jump_p);
1272 		      i2mod = NULL_RTX;
1273 		      if (next)
1274 			goto retry;
1275 		      SET_SRC (set) = orig;
1276 		    }
1277 		}
1278 
1279 	      if (!NOTE_P (insn))
1280 		record_dead_and_set_regs (insn);
1281 
1282 	    retry:
1283 	      ;
1284 	    }
1285 	}
1286     }
1287 
1288   default_rtl_profile ();
1289   clear_log_links ();
1290   clear_bb_flags ();
1291   new_direct_jump_p |= purge_all_dead_edges ();
1292   delete_noop_moves ();
1293 
1294   /* Clean up.  */
1295   free (uid_log_links);
1296   free (uid_insn_cost);
1297   VEC_free (reg_stat_type, heap, reg_stat);
1298 
1299   {
1300     struct undo *undo, *next;
1301     for (undo = undobuf.frees; undo; undo = next)
1302       {
1303 	next = undo->next;
1304 	free (undo);
1305       }
1306     undobuf.frees = 0;
1307   }
1308 
1309   total_attempts += combine_attempts;
1310   total_merges += combine_merges;
1311   total_extras += combine_extras;
1312   total_successes += combine_successes;
1313 
1314   nonzero_sign_valid = 0;
1315   rtl_hooks = general_rtl_hooks;
1316 
1317   /* Make recognizer allow volatile MEMs again.  */
1318   init_recog ();
1319 
1320   return new_direct_jump_p;
1321 }
1322 
1323 /* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */
1324 
1325 static void
1326 init_reg_last (void)
1327 {
1328   unsigned int i;
1329   reg_stat_type *p;
1330 
1331   for (i = 0; VEC_iterate (reg_stat_type, reg_stat, i, p); ++i)
1332     memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
1333 }
1334 
1335 /* Set up any promoted values for incoming argument registers.  */
1336 
1337 static void
1338 setup_incoming_promotions (rtx first)
1339 {
1340   tree arg;
1341   bool strictly_local = false;
1342 
1343   for (arg = DECL_ARGUMENTS (current_function_decl); arg;
1344        arg = TREE_CHAIN (arg))
1345     {
1346       rtx x, reg = DECL_INCOMING_RTL (arg);
1347       int uns1, uns3;
1348       enum machine_mode mode1, mode2, mode3, mode4;
1349 
1350       /* Only continue if the incoming argument is in a register.  */
1351       if (!REG_P (reg))
1352 	continue;
1353 
1354       /* Determine, if possible, whether all call sites of the current
1355          function lie within the current compilation unit.  (This does
1356 	 take into account the exporting of a function via taking its
1357 	 address, and so forth.)  */
1358       strictly_local = cgraph_local_info (current_function_decl)->local;
1359 
1360       /* The mode and signedness of the argument before any promotions happen
1361          (equal to the mode of the pseudo holding it at that stage).  */
1362       mode1 = TYPE_MODE (TREE_TYPE (arg));
1363       uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));
1364 
1365       /* The mode and signedness of the argument after any source language and
1366          TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
1367       mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
1368       uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));
1369 
1370       /* The mode and signedness of the argument as it is actually passed,
1371          after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions.  */
1372       mode3 = promote_function_mode (DECL_ARG_TYPE (arg), mode2, &uns3,
1373 				     TREE_TYPE (cfun->decl), 0);
1374 
1375       /* The mode of the register in which the argument is being passed.  */
1376       mode4 = GET_MODE (reg);
1377 
1378       /* Eliminate sign extensions in the callee when:
1379 	 (a) A mode promotion has occurred;  */
1380       if (mode1 == mode3)
1381 	continue;
1382       /* (b) The mode of the register is the same as the mode of
1383 	     the argument as it is passed; */
1384       if (mode3 != mode4)
1385 	continue;
1386       /* (c) There's no language level extension;  */
1387       if (mode1 == mode2)
1388 	;
1389       /* (c.1) All callers are from the current compilation unit.  If that's
1390 	 the case we don't have to rely on an ABI, we only have to know
1391 	 what we're generating right now, and we know that we will do the
1392 	 mode1 to mode2 promotion with the given sign.  */
1393       else if (!strictly_local)
1394 	continue;
1395       /* (c.2) The combination of the two promotions is useful.  This is
1396 	 true when the signs match, or if the first promotion is unsigned.
	 In the latter case, (sign_extend (zero_extend x)) is the same as
1398 	 (zero_extend (zero_extend x)), so make sure to force UNS3 true.  */
1399       else if (uns1)
1400 	uns3 = true;
1401       else if (uns3)
1402 	continue;
1403 
1404       /* Record that the value was promoted from mode1 to mode3,
1405 	 so that any sign extension at the head of the current
1406 	 function may be eliminated.  */
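      /* For example (hypothetical): a signed char argument promoted to
	 SImode would be recorded as
	 (sign_extend:SI (clobber:QI (const_int 0))), i.e. "the sign
	 extension of some unknown QImode value".  */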
1407       x = gen_rtx_CLOBBER (mode1, const0_rtx);
1408       x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
1409       record_value_for_reg (reg, first, x);
1410     }
1411 }
1412 
1413 /* Called via note_stores.  If X is a pseudo that is narrower than
1414    HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
1415 
1416    If we are setting only a portion of X and we can't figure out what
1417    portion, assume all bits will be used since we don't know what will
1418    be happening.
1419 
1420    Similarly, set how many bits of X are known to be copies of the sign bit
1421    at all locations in the function.  This is the smallest number implied
1422    by any set of X.  */
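
/* For instance (hypothetical): after (set (reg:SI 100) (const_int 7)),
   the nonzero_bits entry for register 100 gains only the low three bits,
   and num_sign_bit_copies of the source is 29 on a 32-bit machine.  */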
1423 
1424 static void
1425 set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
1426 {
1427   rtx insn = (rtx) data;
1428   unsigned int num;
1429 
1430   if (REG_P (x)
1431       && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the function, we
	 can't say what its contents were.  */
1434       && ! REGNO_REG_SET_P
1435            (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
1436       && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
1437     {
1438       reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
1439 
1440       if (set == 0 || GET_CODE (set) == CLOBBER)
1441 	{
1442 	  rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1443 	  rsp->sign_bit_copies = 1;
1444 	  return;
1445 	}
1446 
1447       /* If this register is being initialized using itself, and the
1448 	 register is uninitialized in this basic block, and there are
1449 	 no LOG_LINKS which set the register, then part of the
1450 	 register is uninitialized.  In that case we can't assume
1451 	 anything about the number of nonzero bits.
1452 
1453 	 ??? We could do better if we checked this in
1454 	 reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
1455 	 could avoid making assumptions about the insn which initially
1456 	 sets the register, while still using the information in other
1457 	 insns.  We would have to be careful to check every insn
1458 	 involved in the combination.  */
1459 
1460       if (insn
1461 	  && reg_referenced_p (x, PATTERN (insn))
1462 	  && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
1463 			       REGNO (x)))
1464 	{
1465 	  rtx link;
1466 
1467 	  for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1468 	    {
1469 	      if (dead_or_set_p (XEXP (link, 0), x))
1470 		break;
1471 	    }
1472 	  if (!link)
1473 	    {
1474 	      rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1475 	      rsp->sign_bit_copies = 1;
1476 	      return;
1477 	    }
1478 	}
1479 
1480       /* If this is a complex assignment, see if we can convert it into a
1481 	 simple assignment.  */
1482       set = expand_field_assignment (set);
1483 
1484       /* If this is a simple assignment, or we have a paradoxical SUBREG,
1485 	 set what we know about X.  */
1486 
1487       if (SET_DEST (set) == x
1488 	  || (GET_CODE (SET_DEST (set)) == SUBREG
1489 	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
1490 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
1491 	      && SUBREG_REG (SET_DEST (set)) == x))
1492 	{
1493 	  rtx src = SET_SRC (set);
1494 
1495 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
1496 	  /* If X is narrower than a word and SRC is a non-negative
1497 	     constant that would appear negative in the mode of X,
1498 	     sign-extend it for use in reg_stat[].nonzero_bits because some
1499 	     machines (maybe most) will actually do the sign-extension
1500 	     and this is the conservative approach.
1501 
1502 	     ??? For 2.5, try to tighten up the MD files in this regard
1503 	     instead of this kludge.  */
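
	  /* A worked example (illustrative): for QImode X and
	     SRC == (const_int 128), bit 0x80 is set, so SRC is recorded
	     as if it were (const_int -128), i.e. sign-extended.  */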
1504 
1505 	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
1506 	      && CONST_INT_P (src)
1507 	      && INTVAL (src) > 0
1508 	      && 0 != (INTVAL (src)
1509 		       & ((HOST_WIDE_INT) 1
1510 			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
1511 	    src = GEN_INT (INTVAL (src)
1512 			   | ((HOST_WIDE_INT) (-1)
1513 			      << GET_MODE_BITSIZE (GET_MODE (x))));
1514 #endif
1515 
1516 	  /* Don't call nonzero_bits if it cannot change anything.  */
1517 	  if (rsp->nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
1518 	    rsp->nonzero_bits |= nonzero_bits (src, nonzero_bits_mode);
1519 	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
1520 	  if (rsp->sign_bit_copies == 0
1521 	      || rsp->sign_bit_copies > num)
1522 	    rsp->sign_bit_copies = num;
1523 	}
1524       else
1525 	{
1526 	  rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1527 	  rsp->sign_bit_copies = 1;
1528 	}
1529     }
1530 }
1531 
1532 /* See if INSN can be combined into I3.  PRED and SUCC are optionally
1533    insns that were previously combined into I3 or that will be combined
1534    into the merger of INSN and I3.
1535 
1536    Return 0 if the combination is not allowed for any reason.
1537 
1538    If the combination is allowed, *PDEST will be set to the single
1539    destination of INSN and *PSRC to the single source, and this function
1540    will return 1.  */
1541 
1542 static int
1543 can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
1544 	       rtx *pdest, rtx *psrc)
1545 {
1546   int i;
1547   const_rtx set = 0;
1548   rtx src, dest;
1549   rtx p;
1550 #ifdef AUTO_INC_DEC
1551   rtx link;
1552 #endif
1553   int all_adjacent = (succ ? (next_active_insn (insn) == succ
1554 			      && next_active_insn (succ) == i3)
1555 		      : next_active_insn (insn) == i3);
1556 
1557 	  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
1558      or a PARALLEL consisting of such a SET and CLOBBERs.
1559 
1560      If INSN has CLOBBER parallel parts, ignore them for our processing.
1561      By definition, these happen during the execution of the insn.  When it
1562      is merged with another insn, all bets are off.  If they are, in fact,
1563      needed and aren't also supplied in I3, they may be added by
1564      recog_for_combine.  Otherwise, it won't match.
1565 
1566      We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
1567      note.
1568 
1569      Get the source and destination of INSN.  If more than one, can't
1570      combine.  */
1571 
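  /* For illustration (an editorial sketch, not from the original code):
     a combinable INSN may look like

	 (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))

     or, with clobbers that are ignored as described above,

	 (parallel [(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
		    (clobber (reg:CC 17))])

     where register 17 stands for a hypothetical flags register.  */
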
1572   if (GET_CODE (PATTERN (insn)) == SET)
1573     set = PATTERN (insn);
1574   else if (GET_CODE (PATTERN (insn)) == PARALLEL
1575 	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1576     {
1577       for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1578 	{
1579 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
1580 
1581 	  switch (GET_CODE (elt))
1582 	    {
1583 	    /* Handling USEs here is important for combining floating
1584 	       point insns on the SH4 port.  */
1585 	    case USE:
1586 	      /* Combining an isolated USE doesn't make sense.
1587 		 We depend here on combinable_i3pat to reject such USEs.  */
1588 	      /* The code below this loop only verifies that the inputs of
1589 		 the SET in INSN do not change.  We call reg_set_between_p
1590 		 to verify that the REG in the USE does not change between
1591 		 I3 and INSN.
1592 		 If the USE in INSN was for a pseudo register, the matching
1593 		 insn pattern will likely match any register; combining this
1594 		 with any other USE would only be safe if we knew that the
1595 		 used registers have identical values, or if there was
1596 		 something to tell them apart, e.g. different modes.  For
1597 		 now, we forgo such complicated tests and simply disallow
1598 		 combining of USES of pseudo registers with any other USE.  */
1599 	      if (REG_P (XEXP (elt, 0))
1600 		  && GET_CODE (PATTERN (i3)) == PARALLEL)
1601 		{
1602 		  rtx i3pat = PATTERN (i3);
1603 		  int i = XVECLEN (i3pat, 0) - 1;
1604 		  unsigned int regno = REGNO (XEXP (elt, 0));
1605 
1606 		  do
1607 		    {
1608 		      rtx i3elt = XVECEXP (i3pat, 0, i);
1609 
1610 		      if (GET_CODE (i3elt) == USE
1611 			  && REG_P (XEXP (i3elt, 0))
1612 			  && (REGNO (XEXP (i3elt, 0)) == regno
1613 			      ? reg_set_between_p (XEXP (elt, 0),
1614 						   PREV_INSN (insn), i3)
1615 			      : regno >= FIRST_PSEUDO_REGISTER))
1616 			return 0;
1617 		    }
1618 		  while (--i >= 0);
1619 		}
1620 	      break;
1621 
1622 	      /* We can ignore CLOBBERs.  */
1623 	    case CLOBBER:
1624 	      break;
1625 
1626 	    case SET:
1627 	      /* Ignore SETs whose result isn't used, but not those that
1628 		 have side-effects.  */
1629 	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
1630 		  && insn_nothrow_p (insn)
1631 		  && !side_effects_p (elt))
1632 		break;
1633 
1634 	      /* If we have already found a SET, this is a second one and
1635 		 so we cannot combine with this insn.  */
1636 	      if (set)
1637 		return 0;
1638 
1639 	      set = elt;
1640 	      break;
1641 
1642 	    default:
1643 	      /* Anything else means we can't combine.  */
1644 	      return 0;
1645 	    }
1646 	}
1647 
1648       if (set == 0
1649 	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1650 	     so don't do anything with it.  */
1651 	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1652 	return 0;
1653     }
1654   else
1655     return 0;
1656 
1657   if (set == 0)
1658     return 0;
1659 
1660   set = expand_field_assignment (set);
1661   src = SET_SRC (set), dest = SET_DEST (set);
1662 
1663   /* Don't eliminate a store in the stack pointer.  */
1664   if (dest == stack_pointer_rtx
1665       /* Don't combine with an insn that sets a register to itself if it has
1666 	 a REG_EQUAL note.  This may be part of a LIBCALL sequence.  */
1667       || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1668       /* Can't merge an ASM_OPERANDS.  */
1669       || GET_CODE (src) == ASM_OPERANDS
1670       /* Can't merge a function call.  */
1671       || GET_CODE (src) == CALL
1672       /* Don't eliminate a function call argument.  */
1673       || (CALL_P (i3)
1674 	  && (find_reg_fusage (i3, USE, dest)
1675 	      || (REG_P (dest)
1676 		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
1677 		  && global_regs[REGNO (dest)])))
1678       /* Don't substitute into an incremented register.  */
1679       || FIND_REG_INC_NOTE (i3, dest)
1680       || (succ && FIND_REG_INC_NOTE (succ, dest))
1681       /* Don't substitute into a non-local goto, this confuses CFG.  */
1682       || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
1683       /* Make sure that DEST is not used after SUCC but before I3.  */
1684       || (succ && ! all_adjacent
1685 	  && reg_used_between_p (dest, succ, i3))
1686       /* Make sure that the value that is to be substituted for the register
1687 	 does not use any registers whose values alter in between.  However,
1688 	 if the insns are adjacent, a use can't cross a set even though we
1689 	 think it might (this can happen for a sequence of insns each setting
1690 	 the same destination; last_set of that register might point to
1691 	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
1692 	 equivalent to the memory so the substitution is valid even if there
1693 	 are intervening stores.  Also, don't move a volatile asm or
1694 	 UNSPEC_VOLATILE across any other insns.  */
1695       || (! all_adjacent
1696 	  && (((!MEM_P (src)
1697 		|| ! find_reg_note (insn, REG_EQUIV, src))
1698 	       && use_crosses_set_p (src, DF_INSN_LUID (insn)))
1699 	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1700 	      || GET_CODE (src) == UNSPEC_VOLATILE))
1701       /* Don't combine across a CALL_INSN, because that would possibly
1702 	 change whether the life span of some REGs crosses calls or not,
1703 	 and it is a pain to update that information.
1704 	 Exception: if source is a constant, moving it later can't hurt.
1705 	 Accept that as a special case.  */
1706       || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
1707     return 0;
1708 
1709   /* DEST must either be a REG or CC0.  */
1710   if (REG_P (dest))
1711     {
1712       /* If register alignment is being enforced for multi-word items in all
1713 	 cases except for parameters, it is possible to have a register copy
1714 	 insn referencing a hard register that is not allowed to contain the
1715 	 mode being copied and which would not be valid as an operand of most
1716 	 insns.  Eliminate this problem by not combining with such an insn.
1717 
1718 	 Also, on some machines we don't want to extend the life of a hard
1719 	 register.  */
1720 
1721       if (REG_P (src)
1722 	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1723 	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1724 	      /* Don't extend the life of a hard register unless it is
1725 		 user variable (if we have few registers) or it can't
1726 		 fit into the desired register (meaning something special
1727 		 is going on).
1728 		 Also avoid substituting a return register into I3, because
1729 		 reload can't handle a conflict with constraints of other
1730 		 inputs.  */
1731 	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
1732 		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
1733 	return 0;
1734     }
1735   else if (GET_CODE (dest) != CC0)
1736     return 0;
1737 
1738 
1739   if (GET_CODE (PATTERN (i3)) == PARALLEL)
1740     for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1741       if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
1742 	{
1743 	  /* Don't substitute for a register intended as a clobberable
1744 	     operand.  */
1745 	  rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
1746 	  if (rtx_equal_p (reg, dest))
1747 	    return 0;
1748 
1749 	  /* If the clobber represents an earlyclobber operand, we must not
1750 	     substitute an expression containing the clobbered register.
1751 	     As we do not analyze the constraint strings here, we have to
1752 	     make the conservative assumption.  However, if the register is
1753 	     a fixed hard reg, the clobber cannot represent any operand;
1754 	     we leave it up to the machine description to either accept or
1755 	     reject use-and-clobber patterns.  */
1756 	  if (!REG_P (reg)
1757 	      || REGNO (reg) >= FIRST_PSEUDO_REGISTER
1758 	      || !fixed_regs[REGNO (reg)])
1759 	    if (reg_overlap_mentioned_p (reg, src))
1760 	      return 0;
1761 	}
1762 
1763   /* If INSN contains anything volatile, or is an `asm' (whether volatile
1764      or not), reject, unless nothing volatile comes between it and I3.  */
1765 
1766   if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1767     {
1768       /* Make sure succ doesn't contain a volatile reference.  */
1769       if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1770 	return 0;
1771 
1772       for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1773 	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
1774 	  return 0;
1775     }
1776 
1777   /* If INSN is an asm, and DEST is a hard register, reject, since it has
1778      to be an explicit register variable, and was chosen for a reason.  */
1779 
1780   if (GET_CODE (src) == ASM_OPERANDS
1781       && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1782     return 0;
1783 
1784   /* If there are any volatile insns between INSN and I3, reject, because
1785      they might affect machine state.  */
1786 
1787   for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1788     if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
1789       return 0;
1790 
1791   /* If INSN contains an autoincrement or autodecrement, make sure that
1792      register is not used between there and I3, and not already used in
1793      I3 either.  Neither must it be used in PRED or SUCC, if they exist.
1794      Also insist that I3 not be a jump; if it were one
1795      and the incremented register were spilled, we would lose.  */
1796 
1797 #ifdef AUTO_INC_DEC
1798   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1799     if (REG_NOTE_KIND (link) == REG_INC
1800 	&& (JUMP_P (i3)
1801 	    || reg_used_between_p (XEXP (link, 0), insn, i3)
1802 	    || (pred != NULL_RTX
1803 		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
1804 	    || (succ != NULL_RTX
1805 		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
1806 	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1807       return 0;
1808 #endif
1809 
1810 #ifdef HAVE_cc0
1811   /* Don't combine an insn that follows a CC0-setting insn.
1812      An insn that uses CC0 must not be separated from the one that sets it.
1813      We do, however, allow I2 to follow a CC0-setting insn if that insn
1814      is passed as I1; in that case it will be deleted also.
1815      We also allow combining in this case if all the insns are adjacent
1816      because that would leave the two CC0 insns adjacent as well.
1817      It would be more logical to test whether CC0 occurs inside I1 or I2,
1818      but that would be much slower, and this ought to be equivalent.  */
1819 
1820   p = prev_nonnote_insn (insn);
1821   if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
1822       && ! all_adjacent)
1823     return 0;
1824 #endif
1825 
1826   /* If we get here, we have passed all the tests and the combination is
1827      to be allowed.  */
1828 
1829   *pdest = dest;
1830   *psrc = src;
1831 
1832   return 1;
1833 }
1834 
1835 /* LOC is the location within I3 that contains its pattern or the component
1836    of a PARALLEL of the pattern.  We validate that it is valid for combining.
1837 
1838    One problem is if I3 modifies its output, as opposed to replacing it
1839    entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1840    so would produce an insn that is not equivalent to the original insns.
1841 
1842    Consider:
1843 
1844 	 (set (reg:DI 101) (reg:DI 100))
1845 	 (set (subreg:SI (reg:DI 101) 0) <foo>)
1846 
1847    This is NOT equivalent to:
1848 
1849 	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1850 		    (set (reg:DI 101) (reg:DI 100))])
1851 
1852    Not only does this modify 100 (in which case it might still be valid
1853    if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1854 
1855    We can also run into a problem if I2 sets a register that I1
1856    uses and I1 gets directly substituted into I3 (not via I2).  In that
1857    case, we would be getting the wrong value of I2DEST into I3, so we
1858    must reject the combination.  This case occurs when I2 and I1 both
1859    feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1860    If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
1861    of a SET must prevent combination from occurring.
1862 
1863    Before doing the above check, we first try to expand a field assignment
1864    into a set of logical operations.
1865 
1866    If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
1867    we place a register that is both set and used within I3.  If more than one
1868    such register is detected, we fail.
1869 
1870    Return 1 if the combination is valid, zero otherwise.  */
1871 
1872 static int
1873 combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
1874 		  int i1_not_in_src, rtx *pi3dest_killed)
1875 {
1876   rtx x = *loc;
1877 
1878   if (GET_CODE (x) == SET)
1879     {
1880 	      rtx set = x;
1881       rtx dest = SET_DEST (set);
1882       rtx src = SET_SRC (set);
1883       rtx inner_dest = dest;
1884       rtx subdest;
1885 
1886       while (GET_CODE (inner_dest) == STRICT_LOW_PART
1887 	     || GET_CODE (inner_dest) == SUBREG
1888 	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
1889 	inner_dest = XEXP (inner_dest, 0);
1890 
1891       /* Check for the case where I3 modifies its output, as discussed
1892 	 above.  We don't want to prevent pseudos from being combined
1893 	 into the address of a MEM, so only prevent the combination if
1894 	 i1 or i2 set the same MEM.  */
1895       if ((inner_dest != dest &&
1896 	   (!MEM_P (inner_dest)
1897 	    || rtx_equal_p (i2dest, inner_dest)
1898 	    || (i1dest && rtx_equal_p (i1dest, inner_dest)))
1899 	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
1900 	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1901 
1902 	  /* This is the same test done in can_combine_p except we can't test
1903 	     all_adjacent; we don't have to, since this instruction will stay
1904 	     in place, thus we are not considering increasing the lifetime of
1905 	     INNER_DEST.
1906 
1907 	     Also, if this insn sets a function argument, combining it with
1908 	     something that might need a spill could clobber a previous
1909 	     function argument; the all_adjacent test in can_combine_p also
1910 	     checks this; here, we do a more specific test for this case.  */
1911 
1912 	  || (REG_P (inner_dest)
1913 	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1914 	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1915 					GET_MODE (inner_dest))))
1916 	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1917 	return 0;
1918 
1919       /* If DEST is used in I3, it is being killed in this insn, so
1920 	 record that for later.  We have to consider paradoxical
1921 	 subregs here, since they kill the whole register, but we
1922 	 ignore partial subregs, STRICT_LOW_PART, etc.
1923 	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1924 	 STACK_POINTER_REGNUM, since these are always considered to be
1925 	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
1926       subdest = dest;
1927       if (GET_CODE (subdest) == SUBREG
1928 	  && (GET_MODE_SIZE (GET_MODE (subdest))
1929 	      >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
1930 	subdest = SUBREG_REG (subdest);
1931       if (pi3dest_killed
1932 	  && REG_P (subdest)
1933 	  && reg_referenced_p (subdest, PATTERN (i3))
1934 	  && REGNO (subdest) != FRAME_POINTER_REGNUM
1935 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1936 	  && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
1937 #endif
1938 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1939 	  && (REGNO (subdest) != ARG_POINTER_REGNUM
1940 	      || ! fixed_regs [REGNO (subdest)])
1941 #endif
1942 	  && REGNO (subdest) != STACK_POINTER_REGNUM)
1943 	{
1944 	  if (*pi3dest_killed)
1945 	    return 0;
1946 
1947 	  *pi3dest_killed = subdest;
1948 	}
1949     }
1950 
1951   else if (GET_CODE (x) == PARALLEL)
1952     {
1953       int i;
1954 
1955       for (i = 0; i < XVECLEN (x, 0); i++)
1956 	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1957 				i1_not_in_src, pi3dest_killed))
1958 	  return 0;
1959     }
1960 
1961   return 1;
1962 }
1963 
1964 /* Return 1 if X is an arithmetic expression that contains a multiplication
1965    or division.  We don't count multiplications by powers of two here.  */
1966 
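/* Illustrative examples (editorial, not from the original sources):
   contains_muldiv returns 1 for (div:SI (reg:SI 1) (reg:SI 2)) and for
   (plus:SI (mult:SI (reg:SI 1) (reg:SI 2)) (reg:SI 3)), but 0 for
   (mult:SI (reg:SI 1) (const_int 4)), since multiplying by a power of
   two is as cheap as a shift.  */
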
1967 static int
1968 contains_muldiv (rtx x)
1969 {
1970   switch (GET_CODE (x))
1971     {
1972     case MOD:  case DIV:  case UMOD:  case UDIV:
1973       return 1;
1974 
1975     case MULT:
1976       return ! (CONST_INT_P (XEXP (x, 1))
1977 		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1978     default:
1979       if (BINARY_P (x))
1980 	return contains_muldiv (XEXP (x, 0))
1981 	    || contains_muldiv (XEXP (x, 1));
1982 
1983       if (UNARY_P (x))
1984 	return contains_muldiv (XEXP (x, 0));
1985 
1986       return 0;
1987     }
1988 }
1989 
1990 /* Determine whether INSN can be used in a combination.  Return nonzero if
1991    not.  This is used in try_combine to detect early some cases where we
1992    can't perform combinations.  */
1993 
1994 static int
1995 cant_combine_insn_p (rtx insn)
1996 {
1997   rtx set;
1998   rtx src, dest;
1999 
2000   /* If this isn't really an insn, we can't do anything.
2001      This can occur when flow deletes an insn that it has merged into an
2002      auto-increment address.  */
2003   if (! INSN_P (insn))
2004     return 1;
2005 
2006   /* Never combine loads and stores involving hard regs that are likely
2007      to be spilled.  The register allocator can usually handle such
2008      reg-reg moves by tying.  If we allow the combiner to make
2009      substitutions of likely-spilled regs, reload might die.
2010      As an exception, we allow combinations involving fixed regs; these are
2011      not available to the register allocator so there's no risk involved.  */
2012 
2013   set = single_set (insn);
2014   if (! set)
2015     return 0;
2016   src = SET_SRC (set);
2017   dest = SET_DEST (set);
2018   if (GET_CODE (src) == SUBREG)
2019     src = SUBREG_REG (src);
2020   if (GET_CODE (dest) == SUBREG)
2021     dest = SUBREG_REG (dest);
2022   if (REG_P (src) && REG_P (dest)
2023       && ((REGNO (src) < FIRST_PSEUDO_REGISTER
2024 	   && ! fixed_regs[REGNO (src)]
2025 	   && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
2026 	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
2027 	      && ! fixed_regs[REGNO (dest)]
2028 	      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
2029     return 1;
2030 
2031   return 0;
2032 }
2033 
2034 struct likely_spilled_retval_info
2035 {
2036   unsigned regno, nregs;
2037   unsigned mask;
2038 };
2039 
2040 /* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
2041    hard registers that are known to be written to / clobbered in full.  */
2042 static void
2043 likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
2044 {
2045   struct likely_spilled_retval_info *const info =
2046     (struct likely_spilled_retval_info *) data;
2047   unsigned regno, nregs;
2048   unsigned new_mask;
2049 
2050   if (!REG_P (XEXP (set, 0)))
2051     return;
2052   regno = REGNO (x);
2053   if (regno >= info->regno + info->nregs)
2054     return;
2055   nregs = hard_regno_nregs[regno][GET_MODE (x)];
2056   if (regno + nregs <= info->regno)
2057     return;
2058   new_mask = (2U << (nregs - 1)) - 1;
2059   if (regno < info->regno)
2060     new_mask >>= info->regno - regno;
2061   else
2062     new_mask <<= regno - info->regno;
2063   info->mask &= ~new_mask;
2064 }
2065 
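/* Editorial worked example (illustrative only): for a return value
   occupying nregs == 3 hard registers, (2U << (nregs - 1)) - 1 yields
   the mask 0b111.  A store covering just the single register
   info->regno + 1 produces new_mask == 1, shifted left by 1 to 0b010
   and cleared from info->mask, leaving 0b101: only the first and third
   registers of the value remain possibly live.  */
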
2066 /* Return nonzero iff part of the return value is live during INSN, and
2067    it is likely spilled.  This can happen when more than one insn is needed
2068    to copy the return value, e.g. when we consider combining into the
2069    second copy insn for a complex value.  */
2070 
2071 static int
2072 likely_spilled_retval_p (rtx insn)
2073 {
2074   rtx use = BB_END (this_basic_block);
2075   rtx reg, p;
2076   unsigned regno, nregs;
2077   /* We assume here that no machine mode needs more than
2078      32 hard registers when the value overlaps with a register
2079      for which FUNCTION_VALUE_REGNO_P is true.  */
2080   unsigned mask;
2081   struct likely_spilled_retval_info info;
2082 
2083   if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
2084     return 0;
2085   reg = XEXP (PATTERN (use), 0);
2086   if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
2087     return 0;
2088   regno = REGNO (reg);
2089   nregs = hard_regno_nregs[regno][GET_MODE (reg)];
2090   if (nregs == 1)
2091     return 0;
2092   mask = (2U << (nregs - 1)) - 1;
2093 
2094   /* Disregard parts of the return value that are set later.  */
2095   info.regno = regno;
2096   info.nregs = nregs;
2097   info.mask = mask;
2098   for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
2099     if (INSN_P (p))
2100       note_stores (PATTERN (p), likely_spilled_retval_1, &info);
2101   mask = info.mask;
2102 
2103   /* Check if any of the (probably) live return value registers is
2104      likely spilled.  */
2105   nregs--;
2106   do
2107     {
2108       if ((mask & 1 << nregs)
2109 	  && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
2110 	return 1;
2111     } while (nregs--);
2112   return 0;
2113 }
2114 
2115 /* Adjust INSN after we made a change to its destination.
2116 
2117    Changing the destination can invalidate notes that say something about
2118    the results of the insn and a LOG_LINK pointing to the insn.  */
2119 
2120 static void
2121 adjust_for_new_dest (rtx insn)
2122 {
2123   /* For notes, be conservative and simply remove them.  */
2124   remove_reg_equal_equiv_notes (insn);
2125 
2126   /* The new insn will have a destination that was previously the destination
2127      of an insn just above it.  Call distribute_links to make a LOG_LINK from
2128      the next use of that destination.  */
2129   distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
2130 
2131   df_insn_rescan (insn);
2132 }
2133 
2134 /* Return TRUE if combine can reuse reg X in mode MODE.
2135    ADDED_SETS is nonzero if the original set is still required.  */
2136 static bool
2137 can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
2138 {
2139   unsigned int regno;
2140 
2141   if (!REG_P(x))
2142     return false;
2143 
2144   regno = REGNO (x);
2145   /* Allow hard registers if the new mode is legal, and occupies no more
2146      registers than the old mode.  */
2147   if (regno < FIRST_PSEUDO_REGISTER)
2148     return (HARD_REGNO_MODE_OK (regno, mode)
2149 	    && (hard_regno_nregs[regno][GET_MODE (x)]
2150 		>= hard_regno_nregs[regno][mode]));
2151 
2152   /* Or a pseudo that is only used once.  */
2153   return (REG_N_SETS (regno) == 1 && !added_sets
2154 	  && !REG_USERVAR_P (x));
2155 }
2156 
2157 
2158 /* Check whether X, the destination of a set, refers to part of
2159    the register specified by REG.  */
2160 
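/* For example (editorial, illustrative only), with REG == (reg:SI 100),
   reg_subword_p returns true for (subreg:HI (reg:SI 100) 0) and for
   (strict_low_part (subreg:QI (reg:SI 100) 0)), but false for
   (reg:SI 100) itself, which is a full-register destination.  */
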
2161 static bool
2162 reg_subword_p (rtx x, rtx reg)
2163 {
2164   /* Check that reg is an integer mode register.  */
2165   if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
2166     return false;
2167 
2168   if (GET_CODE (x) == STRICT_LOW_PART
2169       || GET_CODE (x) == ZERO_EXTRACT)
2170     x = XEXP (x, 0);
2171 
2172   return GET_CODE (x) == SUBREG
2173 	 && SUBREG_REG (x) == reg
2174 	 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
2175 }
2176 
2177 #ifdef AUTO_INC_DEC
2178 /* Replace auto-increment addressing modes with explicit operations to
2179    access the same addresses without modifying the corresponding
2180    registers.  If AFTER holds, SRC is meant to be reused after the
2181    side effect, otherwise it is to be reused before that.  */
2182 
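/* For illustration (an editorial sketch, not from the original code):
   given SRC == (mem:SI (post_inc:SI (reg:SI 100))) and AFTER false, the
   result is (mem:SI (reg:SI 100)), since before the side effect the
   register still holds the original address; with AFTER true it is
   (mem:SI (plus:SI (reg:SI 100) (const_int 4))), the address after the
   increment by GET_MODE_SIZE of the memory mode.  */
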
2183 static rtx
2184 cleanup_auto_inc_dec (rtx src, bool after, enum machine_mode mem_mode)
2185 {
2186   rtx x = src;
2187   const RTX_CODE code = GET_CODE (x);
2188   int i;
2189   const char *fmt;
2190 
2191   switch (code)
2192     {
2193     case REG:
2194     case CONST_INT:
2195     case CONST_DOUBLE:
2196     case CONST_FIXED:
2197     case CONST_VECTOR:
2198     case SYMBOL_REF:
2199     case CODE_LABEL:
2200     case PC:
2201     case CC0:
2202     case SCRATCH:
2203       /* SCRATCH must be shared because each one represents a distinct value.  */
2204       return x;
2205     case CLOBBER:
2206       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2207 	return x;
2208       break;
2209 
2210     case CONST:
2211       if (shared_const_p (x))
2212 	return x;
2213       break;
2214 
2215     case MEM:
2216       mem_mode = GET_MODE (x);
2217       break;
2218 
2219     case PRE_INC:
2220     case PRE_DEC:
2221     case POST_INC:
2222     case POST_DEC:
2223       gcc_assert (mem_mode != VOIDmode && mem_mode != BLKmode);
2224       if (after == (code == PRE_INC || code == PRE_DEC))
2225 	x = cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode);
2226       else
2227 	x = gen_rtx_PLUS (GET_MODE (x),
2228 			  cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode),
2229 			  GEN_INT ((code == PRE_INC || code == POST_INC)
2230 				   ? GET_MODE_SIZE (mem_mode)
2231 				   : -GET_MODE_SIZE (mem_mode)));
2232       return x;
2233 
2234     case PRE_MODIFY:
2235     case POST_MODIFY:
2236       if (after == (code == PRE_MODIFY))
2237 	x = XEXP (x, 0);
2238       else
2239 	x = XEXP (x, 1);
2240       return cleanup_auto_inc_dec (x, after, mem_mode);
2241 
2242     default:
2243       break;
2244     }
2245 
2246   /* Copy the various flags, fields, and other information.  We assume
2247      that all fields need copying, and then clear the fields that should
2248      not be copied.  That is the sensible default behavior, and forces
2249      us to explicitly document why we are *not* copying a flag.  */
2250   x = shallow_copy_rtx (x);
2251 
2252   /* We do not copy the USED flag, which is used as a mark bit during
2253      walks over the RTL.  */
2254   RTX_FLAG (x, used) = 0;
2255 
2256   /* We do not copy FRAME_RELATED for INSNs.  */
2257   if (INSN_P (x))
2258     RTX_FLAG (x, frame_related) = 0;
2259 
2260   fmt = GET_RTX_FORMAT (code);
2261   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2262     if (fmt[i] == 'e')
2263       XEXP (x, i) = cleanup_auto_inc_dec (XEXP (x, i), after, mem_mode);
2264     else if (fmt[i] == 'E' || fmt[i] == 'V')
2265       {
2266 	int j;
2267 	XVEC (x, i) = rtvec_alloc (XVECLEN (x, i));
2268 	for (j = 0; j < XVECLEN (x, i); j++)
2269 	  XVECEXP (x, i, j)
2270 	    = cleanup_auto_inc_dec (XVECEXP (src, i, j), after, mem_mode);
2271       }
2272 
2273   return x;
2274 }
2275 
2276 /* Auxiliary data structure for propagate_for_debug_stmt.  */
2277 
2278 struct rtx_subst_pair
2279 {
2280   rtx to;
2281   bool adjusted;
2282   bool after;
2283 };
2284 
2285 /* DATA points to an rtx_subst_pair.  Return the value that should be
2286    substituted.  */
2287 
2288 static rtx
2289 propagate_for_debug_subst (rtx from, const_rtx old_rtx, void *data)
2290 {
2291   struct rtx_subst_pair *pair = (struct rtx_subst_pair *)data;
2292 
2293   if (!rtx_equal_p (from, old_rtx))
2294     return NULL_RTX;
2295   if (!pair->adjusted)
2296     {
2297       pair->adjusted = true;
2298       pair->to = cleanup_auto_inc_dec (pair->to, pair->after, VOIDmode);
2299       return pair->to;
2300     }
2301   return copy_rtx (pair->to);
2302 }
2303 #endif
2304 
2305 /* Replace occurrences of DEST with SRC in DEBUG_INSNs between INSN
2306    and LAST.  If MOVE holds, debug insns must also be moved past
2307    LAST.  */
2308 
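/* For example (editorial, illustrative only): if combination eliminated
   the insn (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4))), then
   in a debug insn whose location is (reg:SI 100) for some hypothetical
   user variable, the location is rewritten to
   (plus:SI (reg:SI 99) (const_int 4)) so that the variable remains
   trackable after the combination.  */
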
2309 static void
2310 propagate_for_debug (rtx insn, rtx last, rtx dest, rtx src, bool move)
2311 {
2312   rtx next, move_pos = move ? last : NULL_RTX, loc;
2313 
2314 #ifdef AUTO_INC_DEC
2315   struct rtx_subst_pair p;
2316   p.to = src;
2317   p.adjusted = false;
2318   p.after = move;
2319 #endif
2320 
2321   next = NEXT_INSN (insn);
2322   while (next != last)
2323     {
2324       insn = next;
2325       next = NEXT_INSN (insn);
2326       if (DEBUG_INSN_P (insn))
2327 	{
2328 #ifdef AUTO_INC_DEC
2329 	  loc = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
2330 					 dest, propagate_for_debug_subst, &p);
2331 #else
2332 	  loc = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn), dest, src);
2333 #endif
2334 	  if (loc == INSN_VAR_LOCATION_LOC (insn))
2335 	    continue;
2336 	  INSN_VAR_LOCATION_LOC (insn) = loc;
2337 	  if (move_pos)
2338 	    {
2339 	      remove_insn (insn);
2340 	      PREV_INSN (insn) = NEXT_INSN (insn) = NULL_RTX;
2341 	      move_pos = emit_debug_insn_after (insn, move_pos);
2342 	    }
2343 	  else
2344 	    df_insn_rescan (insn);
2345 	}
2346     }
2347 }
2348 
2349 /* Delete the unconditional jump INSN and adjust the CFG correspondingly.
2350    Note that the INSN should be deleted *after* removing dead edges, so
2351    that the kept edge is the fallthrough edge for a (set (pc) (pc))
2352    but not for a (set (pc) (label_ref FOO)).  */
2353 
2354 static void
2355 update_cfg_for_uncondjump (rtx insn)
2356 {
2357   basic_block bb = BLOCK_FOR_INSN (insn);
2358   bool at_end = (BB_END (bb) == insn);
2359 
2360   if (at_end)
2361     purge_dead_edges (bb);
2362 
2363   delete_insn (insn);
2364   if (at_end && EDGE_COUNT (bb->succs) == 1)
2365     {
2366       rtx insn;
2367 
2368       single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2369 
2370       /* Remove barriers from the footer if there are any.  */
2371       for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
2372 	if (BARRIER_P (insn))
2373 	  {
2374 	    if (PREV_INSN (insn))
2375 	      NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2376 	    else
2377 	      bb->il.rtl->footer = NEXT_INSN (insn);
2378 	    if (NEXT_INSN (insn))
2379 	      PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2380 	  }
2381 	else if (LABEL_P (insn))
2382 	  break;
2383     }
2384 }
2385 
2386 
2387 /* Try to combine the insns I1 and I2 into I3.
2388    Here I1 and I2 appear earlier than I3.
2389    I1 can be zero; then we combine just I2 into I3.
2390 
2391    If we are combining three insns and the resulting insn is not recognized,
2392    try splitting it into two insns.  If that happens, I2 and I3 are retained
2393    and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
2394    are pseudo-deleted.
2395 
2396    Return 0 if the combination does not work.  Then nothing is changed.
2397    If we did the combination, return the insn at which combine should
2398    resume scanning.
2399 
2400    Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
2401    new direct jump instruction.  */
2402 
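/* An editorial example of the simplest, two-insn case (illustrative
   only): with

	I2: (set (reg:SI 100) (mult:SI (reg:SI 98) (reg:SI 99)))
	I3: (set (reg:SI 101) (plus:SI (reg:SI 100) (reg:SI 97)))

   and register 100 dying in I3, substitution yields the candidate

	(set (reg:SI 101) (plus:SI (mult:SI (reg:SI 98) (reg:SI 99))
				   (reg:SI 97)))

   which is kept only if it matches some insn pattern (say, a
   multiply-add) in the target's machine description.  */
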
2403 static rtx
2404 try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
2405 {
2406   /* New patterns for I3 and I2, respectively.  */
2407   rtx newpat, newi2pat = 0;
2408   rtvec newpat_vec_with_clobbers = 0;
2409   int substed_i2 = 0, substed_i1 = 0;
2410   /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
2411   int added_sets_1, added_sets_2;
2412   /* Total number of SETs to put into I3.  */
2413   int total_sets;
2414   /* Nonzero if I2's body now appears in I3.  */
2415   int i2_is_used;
2416   /* INSN_CODEs for new I3, new I2, and user of condition code.  */
2417   int insn_code_number, i2_code_number = 0, other_code_number = 0;
2418   /* Contains I3 if the destination of I3 is used in its source, which means
2419      that the old life of I3 is being killed.  If that usage is placed into
2420      I2 and not in I3, a REG_DEAD note must be made.  */
2421   rtx i3dest_killed = 0;
2422   /* SET_DEST and SET_SRC of I2 and I1.  */
2423   rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0;
2424   /* Set if I2DEST was reused as a scratch register.  */
2425   bool i2scratch = false;
2426   /* PATTERN (I1) and PATTERN (I2), or a copy of it in certain cases.  */
2427   rtx i1pat = 0, i2pat = 0;
2428   /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
2429   int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
2430   int i2dest_killed = 0, i1dest_killed = 0;
2431   int i1_feeds_i3 = 0;
2432   /* Notes that must be added to REG_NOTES in I3 and I2.  */
2433   rtx new_i3_notes, new_i2_notes;
2434   /* Notes that we substituted I3 into I2 instead of the normal case.  */
2435   int i3_subst_into_i2 = 0;
2436   /* Notes that I1, I2 or I3 is a MULT operation.  */
2437   int have_mult = 0;
2438   int swap_i2i3 = 0;
2439   int changed_i3_dest = 0;
2440 
2441   int maxreg;
2442   rtx temp;
2443   rtx link;
2444   rtx other_pat = 0;
2445   rtx new_other_notes;
2446   int i;
2447 
2448   /* Exit early if one of the insns involved can't be used for
2449      combinations.  */
2450   if (cant_combine_insn_p (i3)
2451       || cant_combine_insn_p (i2)
2452       || (i1 && cant_combine_insn_p (i1))
2453       || likely_spilled_retval_p (i3))
2454     return 0;
2455 
2456   combine_attempts++;
2457   undobuf.other_insn = 0;
2458 
2459   /* Reset the hard register usage information.  */
2460   CLEAR_HARD_REG_SET (newpat_used_regs);
2461 
2462   if (dump_file && (dump_flags & TDF_DETAILS))
2463     {
2464       if (i1)
2465 	fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
2466 		 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2467       else
2468 	fprintf (dump_file, "\nTrying %d -> %d:\n",
2469 		 INSN_UID (i2), INSN_UID (i3));
2470     }
2471 
2472   /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
2473      code below, set I1 to be the earlier of the two insns.  */
2474   if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
2475     temp = i1, i1 = i2, i2 = temp;
2476 
2477   added_links_insn = 0;
2478 
2479   /* First check for one important special-case that the code below will
2480      not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
2481      and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
2482      we may be able to replace that destination with the destination of I3.
2483      This occurs in the common code where we compute both a quotient and
2484      remainder into a structure, in which case we want to do the computation
2485      directly into the structure to avoid register-register copies.
2486 
2487      Note that this case handles both multiple sets in I2 and also
2488      cases where I2 has a number of CLOBBERs or PARALLELs.
2489 
2490      We make very conservative checks below and only try to handle the
2491      most common cases of this.  For example, we only handle the case
2492      where I2 and I3 are adjacent to avoid making difficult register
2493      usage tests.  */
2494 
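  /* An editorial illustration of this special case (not from the
     original sources): with

	I2: (parallel [(set (reg:SI 100) (div:SI (reg:SI 98) (reg:SI 99)))
		       (set (reg:SI 101) (mod:SI (reg:SI 98) (reg:SI 99)))])
	I3: (set (mem:SI (reg:SI 102)) (reg:SI 101))

     and register 101 dead after I3, the MOD output of I2 is rewritten
     to store directly into the memory destination of I3.  */
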
2495   if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
2496       && REG_P (SET_SRC (PATTERN (i3)))
2497       && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
2498       && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
2499       && GET_CODE (PATTERN (i2)) == PARALLEL
2500       && ! side_effects_p (SET_DEST (PATTERN (i3)))
2501       /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
2502 	 below would need to check what is inside (and reg_overlap_mentioned_p
2503 	 doesn't support those codes anyway).  Don't allow those destinations;
2504 	 the resulting insn isn't likely to be recognized anyway.  */
2505       && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
2506       && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
2507       && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
2508 				    SET_DEST (PATTERN (i3)))
2509       && next_active_insn (i2) == i3)
2510     {
2511       rtx p2 = PATTERN (i2);
2512 
2513       /* Make sure that the destination of I3,
2514 	 which we are going to substitute into one output of I2,
2515 	 is not used within another output of I2.  We must avoid making this:
2516 	 (parallel [(set (mem (reg 69)) ...)
2517 		    (set (reg 69) ...)])
2518 	 which is not well-defined as to order of actions.
2519 	 (Besides, reload can't handle output reloads for this.)
2520 
2521 	 The problem can also happen if the dest of I3 is a memory ref,
2522 	 if another dest in I2 is an indirect memory ref.  */
2523       for (i = 0; i < XVECLEN (p2, 0); i++)
2524 	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2525 	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2526 	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
2527 					SET_DEST (XVECEXP (p2, 0, i))))
2528 	  break;
2529 
2530       if (i == XVECLEN (p2, 0))
2531 	for (i = 0; i < XVECLEN (p2, 0); i++)
2532 	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2533 	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2534 	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
2535 	    {
2536 	      combine_merges++;
2537 
2538 	      subst_insn = i3;
2539 	      subst_low_luid = DF_INSN_LUID (i2);
2540 
2541 	      added_sets_2 = added_sets_1 = 0;
2542 	      i2src = SET_DEST (PATTERN (i3));
2543 	      i2dest = SET_SRC (PATTERN (i3));
2544 	      i2dest_killed = dead_or_set_p (i2, i2dest);
2545 
2546 	      /* Replace the dest in I2 with our dest and make the resulting
2547 		 insn the new pattern for I3.  Then skip to where we
2548 		 validate the pattern.  Everything was set up above.  */
2549 	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
2550 		     SET_DEST (PATTERN (i3)));
2551 
2552 	      newpat = p2;
2553 	      i3_subst_into_i2 = 1;
2554 	      goto validate_replacement;
2555 	    }
2556     }
2557 
2558   /* If I2 is setting a pseudo to a constant and I3 is setting some
2559      sub-part of it to another constant, merge them by making a new
2560      constant.  */
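  /* For illustration (an editorial sketch, not from the original
     sources): on a little-endian target, with

	I2: (set (reg:SI 100) (const_int 0x12345678))
	I3: (set (subreg:HI (reg:SI 100) 2) (const_int 0x9abc))

     I3 stores into the high halfword of register 100, so the two
     constants merge into the single insn
     (set (reg:SI 100) (const_int 0x9abc5678)).  */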
2561   if (i1 == 0
2562       && (temp = single_set (i2)) != 0
2563       && (CONST_INT_P (SET_SRC (temp))
2564 	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
2565       && GET_CODE (PATTERN (i3)) == SET
2566       && (CONST_INT_P (SET_SRC (PATTERN (i3)))
2567 	  || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
2568       && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
2569     {
2570       rtx dest = SET_DEST (PATTERN (i3));
2571       int offset = -1;
2572       int width = 0;
2573 
2574       if (GET_CODE (dest) == ZERO_EXTRACT)
2575 	{
2576 	  if (CONST_INT_P (XEXP (dest, 1))
2577 	      && CONST_INT_P (XEXP (dest, 2)))
2578 	    {
2579 	      width = INTVAL (XEXP (dest, 1));
2580 	      offset = INTVAL (XEXP (dest, 2));
2581 	      dest = XEXP (dest, 0);
2582 	      if (BITS_BIG_ENDIAN)
2583 		offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset;
2584 	    }
2585 	}
2586       else
2587 	{
2588 	  if (GET_CODE (dest) == STRICT_LOW_PART)
2589 	    dest = XEXP (dest, 0);
2590 	  width = GET_MODE_BITSIZE (GET_MODE (dest));
2591 	  offset = 0;
2592 	}
2593 
2594       if (offset >= 0)
2595 	{
2596 	  /* If this is the low part, we're done.  */
2597 	  if (subreg_lowpart_p (dest))
2598 	    ;
2599 	  /* Handle the case where inner is twice the size of outer.  */
2600 	  else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
2601 		   == 2 * GET_MODE_BITSIZE (GET_MODE (dest)))
2602 	    offset += GET_MODE_BITSIZE (GET_MODE (dest));
2603 	  /* Otherwise give up for now.  */
2604 	  else
2605 	    offset = -1;
2606 	}
2607 
2608       if (offset >= 0
2609 	  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
2610 	      <= HOST_BITS_PER_WIDE_INT * 2))
2611 	{
2612 	  HOST_WIDE_INT mhi, ohi, ihi;
2613 	  HOST_WIDE_INT mlo, olo, ilo;
2614 	  rtx inner = SET_SRC (PATTERN (i3));
2615 	  rtx outer = SET_SRC (temp);
2616 
2617 	  if (CONST_INT_P (outer))
2618 	    {
2619 	      olo = INTVAL (outer);
2620 	      ohi = olo < 0 ? -1 : 0;
2621 	    }
2622 	  else
2623 	    {
2624 	      olo = CONST_DOUBLE_LOW (outer);
2625 	      ohi = CONST_DOUBLE_HIGH (outer);
2626 	    }
2627 
2628 	  if (CONST_INT_P (inner))
2629 	    {
2630 	      ilo = INTVAL (inner);
2631 	      ihi = ilo < 0 ? -1 : 0;
2632 	    }
2633 	  else
2634 	    {
2635 	      ilo = CONST_DOUBLE_LOW (inner);
2636 	      ihi = CONST_DOUBLE_HIGH (inner);
2637 	    }
2638 
2639 	  if (width < HOST_BITS_PER_WIDE_INT)
2640 	    {
2641 	      mlo = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
2642 	      mhi = 0;
2643 	    }
2644 	  else if (width < HOST_BITS_PER_WIDE_INT * 2)
2645 	    {
2646 	      mhi = ((unsigned HOST_WIDE_INT) 1
2647 		     << (width - HOST_BITS_PER_WIDE_INT)) - 1;
2648 	      mlo = -1;
2649 	    }
2650 	  else
2651 	    {
2652 	      mlo = -1;
2653 	      mhi = -1;
2654 	    }
2655 
2656 	  ilo &= mlo;
2657 	  ihi &= mhi;
2658 
2659 	  if (offset >= HOST_BITS_PER_WIDE_INT)
2660 	    {
2661 	      mhi = mlo << (offset - HOST_BITS_PER_WIDE_INT);
2662 	      mlo = 0;
2663 	      ihi = ilo << (offset - HOST_BITS_PER_WIDE_INT);
2664 	      ilo = 0;
2665 	    }
2666 	  else if (offset > 0)
2667 	    {
2668 	      mhi = (mhi << offset) | ((unsigned HOST_WIDE_INT) mlo
2669 		     		       >> (HOST_BITS_PER_WIDE_INT - offset));
2670 	      mlo = mlo << offset;
2671 	      ihi = (ihi << offset) | ((unsigned HOST_WIDE_INT) ilo
2672 		     		       >> (HOST_BITS_PER_WIDE_INT - offset));
2673 	      ilo = ilo << offset;
2674 	    }
2675 
2676 	  olo = (olo & ~mlo) | ilo;
2677 	  ohi = (ohi & ~mhi) | ihi;
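
	  /* Editorial worked example (illustrative only), continuing the
	     sketch above: width == 16 and offset == 16 give mlo == 0xffff,
	     then shifting by offset gives mlo == 0xffff0000 and
	     ilo == 0x9abc0000, so that finally
	     olo == (0x12345678 & ~0xffff0000) | 0x9abc0000 == 0x9abc5678.  */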
2678 
2679 	  combine_merges++;
2680 	  subst_insn = i3;
2681 	  subst_low_luid = DF_INSN_LUID (i2);
2682 	  added_sets_2 = added_sets_1 = 0;
2683 	  i2dest = SET_DEST (temp);
2684 	  i2dest_killed = dead_or_set_p (i2, i2dest);
2685 
2686 	  /* Replace the source in I2 with the new constant and make the
2687 	     resulting insn the new pattern for I3.  Then skip to where we
2688 	     validate the pattern.  Everything was set up above.  */
2689 	  SUBST (SET_SRC (temp),
2690 		 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp))));
2691 
2692 	  newpat = PATTERN (i2);
2693 
2694           /* The dest of I3 has been replaced with the dest of I2.  */
2695           changed_i3_dest = 1;
2696 	  goto validate_replacement;
2697 	}
2698     }
2699 
2700 #ifndef HAVE_cc0
2701   /* If we have no I1 and I2 looks like:
2702 	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2703 		   (set Y OP)])
2704      make up a dummy I1 that is
2705 	(set Y OP)
2706      and change I2 to be
2707 	(set (reg:CC X) (compare:CC Y (const_int 0)))
2708 
2709      (We can ignore any trailing CLOBBERs.)
2710 
2711      This undoes a previous combination and allows us to match a branch-and-
2712      decrement insn.  */
2713 
2714   if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
2715       && XVECLEN (PATTERN (i2), 0) >= 2
2716       && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
2717       && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
2718 	  == MODE_CC)
2719       && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
2720       && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
2721       && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
2722       && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
2723       && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
2724 		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
2725     {
2726       for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
2727 	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
2728 	  break;
2729 
2730       if (i == 1)
2731 	{
2732 	  /* We make I1 with the same INSN_UID as I2.  This gives it
2733 	     the same DF_INSN_LUID for value tracking.  Our fake I1 will
2734 	     never appear in the insn stream so giving it the same INSN_UID
2735 	     as I2 will not cause a problem.  */
2736 
2737 	  i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
2738 			     BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
2739 			     XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX);
2740 
2741 	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
2742 	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
2743 		 SET_DEST (PATTERN (i1)));
2744 	}
2745     }
2746 #endif
2747 
2748   /* Verify that I2 and I1 are valid for combining.  */
2749   if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
2750       || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
2751     {
2752       undo_all ();
2753       return 0;
2754     }
2755 
2756   /* Record whether I2DEST is used in I2SRC and similarly for the other
2757      cases.  Knowing this will help in register status updating below.  */
2758   i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
2759   i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
2760   i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
2761   i2dest_killed = dead_or_set_p (i2, i2dest);
2762   i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
2763 
2764   /* See if I1 directly feeds into I3.  It does if I1DEST is not used
2765      in I2SRC.  */
2766   i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
2767 
2768   /* Ensure that I3's pattern can be the destination of combines.  */
2769   if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
2770 			  i1 && i2dest_in_i1src && i1_feeds_i3,
2771 			  &i3dest_killed))
2772     {
2773       undo_all ();
2774       return 0;
2775     }
2776 
2777   /* See if any of the insns is a MULT operation.  Unless one is, we will
2778      reject a combination that is, since it must be slower.  Be conservative
2779      here.  */
2780   if (GET_CODE (i2src) == MULT
2781       || (i1 != 0 && GET_CODE (i1src) == MULT)
2782       || (GET_CODE (PATTERN (i3)) == SET
2783 	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
2784     have_mult = 1;
2785 
2786   /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
2787      We used to do this EXCEPT in one case: I3 has a post-inc in an
2788      output operand.  However, that exception can give rise to insns like
2789 	mov r3,(r3)+
2790      which is a famous insn on the PDP-11 where the value of r3 used as the
2791      source was model-dependent.  Avoid this sort of thing.  */
2792 
2793 #if 0
2794   if (!(GET_CODE (PATTERN (i3)) == SET
2795 	&& REG_P (SET_SRC (PATTERN (i3)))
2796 	&& MEM_P (SET_DEST (PATTERN (i3)))
2797 	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
2798 	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
2799     /* It's not the exception.  */
2800 #endif
2801 #ifdef AUTO_INC_DEC
2802     for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
2803       if (REG_NOTE_KIND (link) == REG_INC
2804 	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
2805 	      || (i1 != 0
2806 		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
2807 	{
2808 	  undo_all ();
2809 	  return 0;
2810 	}
2811 #endif
2812 
2813   /* See if the SETs in I1 or I2 need to be kept around in the merged
2814      instruction: whenever the value set there is still needed past I3.
2815      For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
2816 
2817      For the SET in I1, we have two cases:  If I1 and I2 independently
2818      feed into I3, the set in I1 needs to be kept around if I1DEST dies
2819      or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
2820      in I1 needs to be kept around unless I1DEST dies or is set in either
2821      I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
2822      I1DEST.  If so, we know I1 feeds into I2.  */
2823 
2824   added_sets_2 = ! dead_or_set_p (i3, i2dest);
2825 
2826   added_sets_1
2827     = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
2828 	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
2829 
2830   /* If the set in I2 needs to be kept around, we must make a copy of
2831      PATTERN (I2), so that when we substitute I1SRC for I1DEST in
2832      PATTERN (I2), we are only substituting for the original I1DEST, not into
2833      an already-substituted copy.  This also prevents making self-referential
2834      rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
2835      I2DEST.  */
2836 
2837   if (added_sets_2)
2838     {
2839       if (GET_CODE (PATTERN (i2)) == PARALLEL)
2840 	i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
2841       else
2842 	i2pat = copy_rtx (PATTERN (i2));
2843     }
2844 
2845   if (added_sets_1)
2846     {
2847       if (GET_CODE (PATTERN (i1)) == PARALLEL)
2848 	i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
2849       else
2850 	i1pat = copy_rtx (PATTERN (i1));
2851     }
2852 
2853   combine_merges++;
2854 
2855   /* Substitute in the latest insn for the regs set by the earlier ones.  */
2856 
2857   maxreg = max_reg_num ();
2858 
2859   subst_insn = i3;
2860 
2861 #ifndef HAVE_cc0
2862   /* Many machines that don't use CC0 have insns that can both perform an
2863      arithmetic operation and set the condition code.  These operations will
2864      be represented as a PARALLEL with the first element of the vector
2865      being a COMPARE of an arithmetic operation with the constant zero.
2866      The second element of the vector will set some pseudo to the result
2867      of the same arithmetic operation.  If we simplify the COMPARE, we won't
2868      match such a pattern and so will generate an extra insn.  Here we test
2869      for this case, where both the comparison and the operation result are
2870      needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
2871      I2SRC.  Later we will make the PARALLEL that contains I2.  */
2872 
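  /* An editorial illustration (not from the original sources): such a
     machine might describe an add-and-set-flags insn as

	(parallel [(set (reg:CCZ 17)
			(compare:CCZ (plus:SI (reg:SI 100) (reg:SI 101))
				     (const_int 0)))
		   (set (reg:SI 102) (plus:SI (reg:SI 100) (reg:SI 101)))])

     where register 17 stands for a hypothetical flags register.  */
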
2873   if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
2874       && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
2875       && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
2876       && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
2877     {
2878 #ifdef SELECT_CC_MODE
2879       rtx *cc_use;
2880       enum machine_mode compare_mode;
2881 #endif
2882 
2883       newpat = PATTERN (i3);
2884       SUBST (XEXP (SET_SRC (newpat), 0), i2src);
2885 
2886       i2_is_used = 1;
2887 
2888 #ifdef SELECT_CC_MODE
2889       /* See if a COMPARE with the operand we substituted in should be done
2890 	 with the mode that is currently being used.  If not, do the same
2891 	 processing we do in `subst' for a SET; namely, if the destination
2892 	 is used only once, try to replace it with a register of the proper
2893 	 mode and also replace the COMPARE.  */
2894       if (undobuf.other_insn == 0
2895 	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
2896 					&undobuf.other_insn))
2897 	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
2898 					      i2src, const0_rtx))
2899 	      != GET_MODE (SET_DEST (newpat))))
2900 	{
2901 	  if (can_change_dest_mode(SET_DEST (newpat), added_sets_2,
2902 				   compare_mode))
2903 	    {
2904 	      unsigned int regno = REGNO (SET_DEST (newpat));
2905 	      rtx new_dest;
2906 
2907 	      if (regno < FIRST_PSEUDO_REGISTER)
2908 		new_dest = gen_rtx_REG (compare_mode, regno);
2909 	      else
2910 		{
2911 		  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
2912 		  new_dest = regno_reg_rtx[regno];
2913 		}
2914 
2915 	      SUBST (SET_DEST (newpat), new_dest);
2916 	      SUBST (XEXP (*cc_use, 0), new_dest);
2917 	      SUBST (SET_SRC (newpat),
2918 		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
2919 	    }
2920 	  else
2921 	    undobuf.other_insn = 0;
2922 	}
2923 #endif
2924     }
2925   else
2926 #endif
2927     {
2928       /* It is possible that the source of I2 or I1 may be performing
2929 	 an unneeded operation, such as a ZERO_EXTEND of something
2930 	 that is known to have the high part zero.  Handle that case
2931 	 by letting subst look at the innermost one of them.
2932 
2933 	 Another way to do this would be to have a function that tries
2934 	 to simplify a single insn instead of merging two or more
2935 	 insns.  We don't do this because of the potential of infinite
2936 	 loops and because of the potential extra memory required.
2937 	 However, doing it the way we are is a bit of a kludge and
2938 	 doesn't catch all cases.
2939 
2940 	 But only do this if -fexpensive-optimizations since it slows
2941 	 things down and doesn't usually win.
2942 
2943 	 This is not done in the COMPARE case above because the
2944 	 unmodified I2PAT is used in the PARALLEL and so a pattern
2945 	 with a modified I2SRC would not match.  */
2946 
2947       if (flag_expensive_optimizations)
2948 	{
2949 	  /* Pass pc_rtx so no substitutions are done, just
2950 	     simplifications.  */
2951 	  if (i1)
2952 	    {
2953 	      subst_low_luid = DF_INSN_LUID (i1);
2954 	      i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
2955 	    }
2956 	  else
2957 	    {
2958 	      subst_low_luid = DF_INSN_LUID (i2);
2959 	      i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
2960 	    }
2961 	}
2962 
2963       n_occurrences = 0;		/* `subst' counts here */
2964 
2965       /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
2966 	 need to make a unique copy of I2SRC each time we substitute it
2967 	 to avoid self-referential rtl.  */
2968 
2969       subst_low_luid = DF_INSN_LUID (i2);
2970       newpat = subst (PATTERN (i3), i2dest, i2src, 0,
2971 		      ! i1_feeds_i3 && i1dest_in_i1src);
2972       substed_i2 = 1;
2973 
2974       /* Record whether i2's body now appears within i3's body.  */
2975       i2_is_used = n_occurrences;
2976     }
2977 
2978   /* If we already got a failure, don't try to do more.  Otherwise,
2979      try to substitute in I1 if we have it.  */
2980 
2981   if (i1 && GET_CODE (newpat) != CLOBBER)
2982     {
2983       /* Check that an autoincrement side-effect on I1 has not been lost.
2984 	 This happens if I1DEST is mentioned in I2 and dies there, and
2985 	 has disappeared from the new pattern.  */
2986       if ((FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2987 	   && !i1_feeds_i3
2988 	   && dead_or_set_p (i2, i1dest)
2989 	   && !reg_overlap_mentioned_p (i1dest, newpat))
2990 	  /* Before we can do this substitution, we must redo the test done
2991 	     above (see detailed comments there) that ensures that I1DEST
2992 	     isn't mentioned in any SETs in NEWPAT that are field assignments.  */
2993           || !combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, 0, 0))
2994 	{
2995 	  undo_all ();
2996 	  return 0;
2997 	}
2998 
2999       n_occurrences = 0;
3000       subst_low_luid = DF_INSN_LUID (i1);
3001       newpat = subst (newpat, i1dest, i1src, 0, 0);
3002       substed_i1 = 1;
3003     }
3004 
3005   /* Fail if an autoincrement side-effect has been duplicated.  Be careful
3006      to count all the ways that I2SRC and I1SRC can be used.  */
3007   if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
3008        && i2_is_used + added_sets_2 > 1)
3009       || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
3010 	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
3011 	      > 1))
3012       /* Fail if we tried to make a new register.  */
3013       || max_reg_num () != maxreg
3014       /* Fail if we couldn't do something and have a CLOBBER.  */
3015       || GET_CODE (newpat) == CLOBBER
3016       /* Fail if this new pattern is a MULT and we didn't have one before
3017 	 at the outer level.  */
3018       || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
3019 	  && ! have_mult))
3020     {
3021       undo_all ();
3022       return 0;
3023     }
3024 
3025   /* If the actions of the earlier insns must be kept
3026      in addition to substituting them into the latest one,
3027      we must make a new PARALLEL for the latest insn
3028      to hold the additional SETs.  */
3029 
3030   if (added_sets_1 || added_sets_2)
3031     {
3032       combine_extras++;
3033 
3034       if (GET_CODE (newpat) == PARALLEL)
3035 	{
3036 	  rtvec old = XVEC (newpat, 0);
3037 	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
3038 	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3039 	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
3040 		  sizeof (old->elem[0]) * old->num_elem);
3041 	}
3042       else
3043 	{
3044 	  rtx old = newpat;
3045 	  total_sets = 1 + added_sets_1 + added_sets_2;
3046 	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3047 	  XVECEXP (newpat, 0, 0) = old;
3048 	}
3049 
3050       if (added_sets_1)
3051 	XVECEXP (newpat, 0, --total_sets) = i1pat;
3052 
3053       if (added_sets_2)
3054 	{
3055 	  /* If there is no I1, use I2's body as is.  We used to also not do
3056 	     the subst call below if I2 was substituted into I3,
3057 	     but that could lose a simplification.  */
3058 	  if (i1 == 0)
3059 	    XVECEXP (newpat, 0, --total_sets) = i2pat;
3060 	  else
3061 	    /* See comment where i2pat is assigned.  */
3062 	    XVECEXP (newpat, 0, --total_sets)
3063 	      = subst (i2pat, i1dest, i1src, 0, 0);
3064 	}
3065     }
3066 
3067  validate_replacement:
3068 
3069   /* Note which hard regs this insn has as inputs.  */
3070   mark_used_regs_combine (newpat);
3071 
3072   /* If recog_for_combine fails, it strips existing clobbers.  If we'll
3073      consider splitting this pattern, we might need these clobbers.  */
3074   if (i1 && GET_CODE (newpat) == PARALLEL
3075       && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
3076     {
3077       int len = XVECLEN (newpat, 0);
3078 
3079       newpat_vec_with_clobbers = rtvec_alloc (len);
3080       for (i = 0; i < len; i++)
3081 	RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
3082     }
3083 
3084   /* Is the result of combination a valid instruction?  */
3085   insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3086 
3087   /* If the result isn't valid, see if it is a PARALLEL of two SETs where
3088      the second SET's destination is a register that is unused and isn't
3089      marked as an instruction that might trap in an EH region.  In that case,
3090 	     we just need the first SET.  This can occur when simplifying a divmod
3091      insn.  We *must* test for this case here because the code below that
3092      splits two independent SETs doesn't handle this case correctly when it
3093      updates the register status.
3094 
3095      It's pointless doing this if we originally had two sets, one from
3096      i3, and one from i2.  Combining then splitting the parallel results
3097      in the original i2 again plus an invalid insn (which we delete).
3098      The net effect is only to move instructions around, which makes
3099      debug info less accurate.
3100 
3101      Also check the case where the first SET's destination is unused.
3102      That would not cause incorrect code, but does cause an unneeded
3103      insn to remain.  */
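  /* For instance (an illustrative shape only; the register numbers are
     hypothetical), a divmod expansion might leave us with

	(parallel [(set (reg:SI 100) (div:SI (reg:SI 102) (reg:SI 103)))
		   (set (reg:SI 101) (mod:SI (reg:SI 102) (reg:SI 103)))])

     where (reg:SI 101) has a REG_UNUSED note on I3; only the first SET
     then needs to survive.  */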
3104 
3105   if (insn_code_number < 0
3106       && !(added_sets_2 && i1 == 0)
3107       && GET_CODE (newpat) == PARALLEL
3108       && XVECLEN (newpat, 0) == 2
3109       && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3110       && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3111       && asm_noperands (newpat) < 0)
3112     {
3113       rtx set0 = XVECEXP (newpat, 0, 0);
3114       rtx set1 = XVECEXP (newpat, 0, 1);
3115 
3116       if (((REG_P (SET_DEST (set1))
3117 	    && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
3118 	   || (GET_CODE (SET_DEST (set1)) == SUBREG
3119 	       && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
3120 	  && insn_nothrow_p (i3)
3121 	  && !side_effects_p (SET_SRC (set1)))
3122 	{
3123 	  newpat = set0;
3124 	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3125 	}
3126 
3127       else if (((REG_P (SET_DEST (set0))
3128 		 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
3129 		|| (GET_CODE (SET_DEST (set0)) == SUBREG
3130 		    && find_reg_note (i3, REG_UNUSED,
3131 				      SUBREG_REG (SET_DEST (set0)))))
3132 	       && insn_nothrow_p (i3)
3133 	       && !side_effects_p (SET_SRC (set0)))
3134 	{
3135 	  newpat = set1;
3136 	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3137 
3138 	  if (insn_code_number >= 0)
3139 	    changed_i3_dest = 1;
3140 	}
3141     }
3142 
3143   /* If we were combining three insns and the result is a simple SET
3144      with no ASM_OPERANDS that wasn't recognized, try to split it into two
3145      insns.  There are two ways to do this.  It can be split using a
3146      machine-specific method (like when you have an addition of a large
3147      constant) or by combine in the function find_split_point.  */
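  /* As an illustrative sketch (not tied to any particular port), on a
     machine whose add-immediate pattern only accepts small constants,

	(set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4100)))

     might be split by the backend into an add of 4096 followed by an add
     of 4, giving two separately recognizable insns.  */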
3148 
3149   if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
3150       && asm_noperands (newpat) < 0)
3151     {
3152       rtx parallel, m_split, *split;
3153 
3154       /* See if the MD file can split NEWPAT.  If it can't, see if letting it
3155 	 use I2DEST as a scratch register will help.  In the latter case,
3156 	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
3157 
3158       m_split = combine_split_insns (newpat, i3);
3159 
3160       /* We can only use I2DEST as a scratch reg if it doesn't overlap any
3161 	 inputs of NEWPAT.  */
3162 
3163       /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
3164 	 possible to try that as a scratch reg.  This would require adding
3165 	 more code to make it work though.  */
3166 
3167       if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
3168 	{
3169 	  enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
3170 
3171 	  /* First try to split using the original register as a
3172 	     scratch register.  */
3173 	  parallel = gen_rtx_PARALLEL (VOIDmode,
3174 				       gen_rtvec (2, newpat,
3175 						  gen_rtx_CLOBBER (VOIDmode,
3176 								   i2dest)));
3177 	  m_split = combine_split_insns (parallel, i3);
3178 
3179 	  /* If that didn't work, try changing the mode of I2DEST if
3180 	     we can.  */
3181 	  if (m_split == 0
3182 	      && new_mode != GET_MODE (i2dest)
3183 	      && new_mode != VOIDmode
3184 	      && can_change_dest_mode (i2dest, added_sets_2, new_mode))
3185 	    {
3186 	      enum machine_mode old_mode = GET_MODE (i2dest);
3187 	      rtx ni2dest;
3188 
3189 	      if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3190 		ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
3191 	      else
3192 		{
3193 		  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
3194 		  ni2dest = regno_reg_rtx[REGNO (i2dest)];
3195 		}
3196 
3197 	      parallel = (gen_rtx_PARALLEL
3198 			  (VOIDmode,
3199 			   gen_rtvec (2, newpat,
3200 				      gen_rtx_CLOBBER (VOIDmode,
3201 						       ni2dest))));
3202 	      m_split = combine_split_insns (parallel, i3);
3203 
3204 	      if (m_split == 0
3205 		  && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
3206 		{
3207 		  struct undo *buf;
3208 
3209 		  adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
3210 		  buf = undobuf.undos;
3211 		  undobuf.undos = buf->next;
3212 		  buf->next = undobuf.frees;
3213 		  undobuf.frees = buf;
3214 		}
3215 	    }
3216 
3217 	  i2scratch = m_split != 0;
3218 	}
3219 
3220       /* If recog_for_combine has discarded clobbers, try to use them
3221 	 again for the split.  */
3222       if (m_split == 0 && newpat_vec_with_clobbers)
3223 	{
3224 	  parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
3225 	  m_split = combine_split_insns (parallel, i3);
3226 	}
3227 
3228       if (m_split && NEXT_INSN (m_split) == NULL_RTX)
3229 	{
3230 	  m_split = PATTERN (m_split);
3231 	  insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
3232 	  if (insn_code_number >= 0)
3233 	    newpat = m_split;
3234 	}
3235       else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
3236 	       && (next_real_insn (i2) == i3
3237 		   || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
3238 	{
3239 	  rtx i2set, i3set;
3240 	  rtx newi3pat = PATTERN (NEXT_INSN (m_split));
3241 	  newi2pat = PATTERN (m_split);
3242 
3243 	  i3set = single_set (NEXT_INSN (m_split));
3244 	  i2set = single_set (m_split);
3245 
3246 	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3247 
3248 	  /* If I2 or I3 has multiple SETs, we won't know how to track
3249 	     register status, so don't use these insns.  If I2's destination
3250 	     is used between I2 and I3, we also can't use these insns.  */
3251 
3252 	  if (i2_code_number >= 0 && i2set && i3set
3253 	      && (next_real_insn (i2) == i3
3254 		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
3255 	    insn_code_number = recog_for_combine (&newi3pat, i3,
3256 						  &new_i3_notes);
3257 	  if (insn_code_number >= 0)
3258 	    newpat = newi3pat;
3259 
3260 	  /* It is possible that both insns now set the destination of I3.
3261 	     If so, we must show an extra use of it.  */
3262 
3263 	  if (insn_code_number >= 0)
3264 	    {
3265 	      rtx new_i3_dest = SET_DEST (i3set);
3266 	      rtx new_i2_dest = SET_DEST (i2set);
3267 
3268 	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
3269 		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
3270 		     || GET_CODE (new_i3_dest) == SUBREG)
3271 		new_i3_dest = XEXP (new_i3_dest, 0);
3272 
3273 	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
3274 		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
3275 		     || GET_CODE (new_i2_dest) == SUBREG)
3276 		new_i2_dest = XEXP (new_i2_dest, 0);
3277 
3278 	      if (REG_P (new_i3_dest)
3279 		  && REG_P (new_i2_dest)
3280 		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
3281 		INC_REG_N_SETS (REGNO (new_i2_dest), 1);
3282 	    }
3283 	}
3284 
3285       /* If we can split it and use I2DEST, go ahead and see if that
3286 	 helps things be recognized.  Verify that none of the registers
3287 	 are set between I2 and I3.  */
3288       if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
3289 #ifdef HAVE_cc0
3290 	  && REG_P (i2dest)
3291 #endif
3292 	  /* We need I2DEST in the proper mode.  If it is a hard register
3293 	     or the only use of a pseudo, we can change its mode.
3294 	     Make sure we don't change a hard register to have a mode that
3295 	     isn't valid for it, or change the number of registers.  */
3296 	  && (GET_MODE (*split) == GET_MODE (i2dest)
3297 	      || GET_MODE (*split) == VOIDmode
3298 	      || can_change_dest_mode (i2dest, added_sets_2,
3299 				       GET_MODE (*split)))
3300 	  && (next_real_insn (i2) == i3
3301 	      || ! use_crosses_set_p (*split, DF_INSN_LUID (i2)))
3302 	  /* We can't overwrite I2DEST if its value is still used by
3303 	     NEWPAT.  */
3304 	  && ! reg_referenced_p (i2dest, newpat))
3305 	{
3306 	  rtx newdest = i2dest;
3307 	  enum rtx_code split_code = GET_CODE (*split);
3308 	  enum machine_mode split_mode = GET_MODE (*split);
3309 	  bool subst_done = false;
3310 	  newi2pat = NULL_RTX;
3311 
3312 	  i2scratch = true;
3313 
3314 	  /* Get NEWDEST as a register in the proper mode.  We have already
3315 	     validated that we can do this.  */
3316 	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
3317 	    {
3318 	      if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3319 		newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
3320 	      else
3321 		{
3322 		  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
3323 		  newdest = regno_reg_rtx[REGNO (i2dest)];
3324 		}
3325 	    }
3326 
3327 	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
3328 	     an ASHIFT.  This can occur if it was inside a PLUS and hence
3329 	     appeared to be a memory address.  This is a kludge.  */
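	  /* E.g. (mult:SI (reg:SI 100) (const_int 8)) found inside an
	     address becomes (ashift:SI (reg:SI 100) (const_int 3));
	     the register number is purely illustrative.  */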
3330 	  if (split_code == MULT
3331 	      && CONST_INT_P (XEXP (*split, 1))
3332 	      && INTVAL (XEXP (*split, 1)) > 0
3333 	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
3334 	    {
3335 	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
3336 					     XEXP (*split, 0), GEN_INT (i)));
3337 	      /* Update split_code because we may not have a multiply
3338 		 anymore.  */
3339 	      split_code = GET_CODE (*split);
3340 	    }
3341 
3342 #ifdef INSN_SCHEDULING
3343 	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
3344 	     be written as a ZERO_EXTEND.  */
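	  /* Illustratively, (subreg:SI (mem:QI ...) 0) is rewritten here
	     as (zero_extend:SI (mem:QI ...)), or as a SIGN_EXTEND below
	     if LOAD_EXTEND_OP says loads in that mode sign extend.  */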
3345 	  if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
3346 	    {
3347 #ifdef LOAD_EXTEND_OP
3348 	      /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
3349 		 what it really is.  */
3350 	      if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
3351 		  == SIGN_EXTEND)
3352 		SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
3353 						    SUBREG_REG (*split)));
3354 	      else
3355 #endif
3356 		SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
3357 						    SUBREG_REG (*split)));
3358 	    }
3359 #endif
3360 
3361 	  /* Attempt to split binary operators using arithmetic identities.  */
3362 	  if (BINARY_P (SET_SRC (newpat))
3363 	      && split_mode == GET_MODE (SET_SRC (newpat))
3364 	      && ! side_effects_p (SET_SRC (newpat)))
3365 	    {
3366 	      rtx setsrc = SET_SRC (newpat);
3367 	      enum machine_mode mode = GET_MODE (setsrc);
3368 	      enum rtx_code code = GET_CODE (setsrc);
3369 	      rtx src_op0 = XEXP (setsrc, 0);
3370 	      rtx src_op1 = XEXP (setsrc, 1);
3371 
3372 	      /* Split "X = Y op Y" as "Z = Y; X = Z op Z".  */
3373 	      if (rtx_equal_p (src_op0, src_op1))
3374 		{
3375 		  newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
3376 		  SUBST (XEXP (setsrc, 0), newdest);
3377 		  SUBST (XEXP (setsrc, 1), newdest);
3378 		  subst_done = true;
3379 		}
3380 	      /* Split "((P op Q) op R) op S" where op is PLUS or MULT.  */
3381 	      else if ((code == PLUS || code == MULT)
3382 		       && GET_CODE (src_op0) == code
3383 		       && GET_CODE (XEXP (src_op0, 0)) == code
3384 		       && (INTEGRAL_MODE_P (mode)
3385 			   || (FLOAT_MODE_P (mode)
3386 			       && flag_unsafe_math_optimizations)))
3387 		{
3388 		  rtx p = XEXP (XEXP (src_op0, 0), 0);
3389 		  rtx q = XEXP (XEXP (src_op0, 0), 1);
3390 		  rtx r = XEXP (src_op0, 1);
3391 		  rtx s = src_op1;
3392 
3393 		  /* Split both "((X op Y) op X) op Y" and
3394 		     "((X op Y) op Y) op X" as "T op T" where T is
3395 		     "X op Y".  */
3396 		  if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
3397 		       || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
3398 		    {
3399 		      newi2pat = gen_rtx_SET (VOIDmode, newdest,
3400 					      XEXP (src_op0, 0));
3401 		      SUBST (XEXP (setsrc, 0), newdest);
3402 		      SUBST (XEXP (setsrc, 1), newdest);
3403 		      subst_done = true;
3404 		    }
3405 		  /* Split "((X op X) op Y) op Y" as "T op T" where
3406 		     T is "X op Y".  */
3407 		  else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
3408 		    {
3409 		      rtx tmp = simplify_gen_binary (code, mode, p, r);
3410 		      newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
3411 		      SUBST (XEXP (setsrc, 0), newdest);
3412 		      SUBST (XEXP (setsrc, 1), newdest);
3413 		      subst_done = true;
3414 		    }
3415 		}
3416 	    }
3417 
3418 	  if (!subst_done)
3419 	    {
3420 	      newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
3421 	      SUBST (*split, newdest);
3422 	    }
3423 
3424 	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3425 
3426 	  /* recog_for_combine might have added CLOBBERs to newi2pat.
3427 	     Make sure NEWPAT does not depend on the clobbered regs.  */
3428 	  if (GET_CODE (newi2pat) == PARALLEL)
3429 	    for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
3430 	      if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
3431 		{
3432 		  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
3433 		  if (reg_overlap_mentioned_p (reg, newpat))
3434 		    {
3435 		      undo_all ();
3436 		      return 0;
3437 		    }
3438 		}
3439 
3440 	  /* If the split point was a MULT and we didn't have one before,
3441 	     don't use one now.  */
3442 	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
3443 	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3444 	}
3445     }
3446 
3447   /* Check for a case where we loaded from memory in a narrow mode and
3448      then sign extended it, but we need both registers.  In that case,
3449      we have a PARALLEL with both loads from the same memory location.
3450      We can split this into a load from memory followed by a register-register
3451      copy.  This saves at least one insn, more if register allocation can
3452      eliminate the copy.
3453 
3454      We cannot do this if the destination of the first assignment is a
3455      condition code register or cc0.  We eliminate this case by making sure
3456      the SET_DEST and SET_SRC have the same mode.
3457 
3458      We cannot do this if the destination of the second assignment is
3459      a register that we have already assumed is zero-extended.  Similarly
3460      for a SUBREG of such a register.  */
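  /* The shape being looked for, with hypothetical register numbers and
     assuming a little-endian target for the SUBREG:

	(parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI (reg:SI 101))))
		   (set (reg:HI 102) (mem:HI (reg:SI 101)))])

     can become the extending load as I2 followed by the copy

	(set (reg:HI 102) (subreg:HI (reg:SI 100) 0))

     as I3.  */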
3461 
3462   else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3463 	   && GET_CODE (newpat) == PARALLEL
3464 	   && XVECLEN (newpat, 0) == 2
3465 	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3466 	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
3467 	   && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
3468 	       == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
3469 	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3470 	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3471 			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
3472 	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3473 				   DF_INSN_LUID (i2))
3474 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3475 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
3476 	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
3477 		 (REG_P (temp)
3478 		  && VEC_index (reg_stat_type, reg_stat,
3479 				REGNO (temp))->nonzero_bits != 0
3480 		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
3481 		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
3482 		  && (VEC_index (reg_stat_type, reg_stat,
3483 				 REGNO (temp))->nonzero_bits
3484 		      != GET_MODE_MASK (word_mode))))
3485 	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
3486 		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
3487 		     (REG_P (temp)
3488 		      && VEC_index (reg_stat_type, reg_stat,
3489 				    REGNO (temp))->nonzero_bits != 0
3490 		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
3491 		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
3492 		      && (VEC_index (reg_stat_type, reg_stat,
3493 				     REGNO (temp))->nonzero_bits
3494 			  != GET_MODE_MASK (word_mode)))))
3495 	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3496 					 SET_SRC (XVECEXP (newpat, 0, 1)))
3497 	   && ! find_reg_note (i3, REG_UNUSED,
3498 			       SET_DEST (XVECEXP (newpat, 0, 0))))
3499     {
3500       rtx ni2dest;
3501 
3502       newi2pat = XVECEXP (newpat, 0, 0);
3503       ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
3504       newpat = XVECEXP (newpat, 0, 1);
3505       SUBST (SET_SRC (newpat),
3506 	     gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
3507       i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3508 
3509       if (i2_code_number >= 0)
3510 	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3511 
3512       if (insn_code_number >= 0)
3513 	swap_i2i3 = 1;
3514     }
3515 
3516   /* Similarly, check for a case where we have a PARALLEL of two independent
3517      SETs but we started with three insns.  In this case, we can do the sets
3518      as two separate insns.  This case occurs when some SET allows two
3519      other insns to combine, but the destination of that SET is still live.  */
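  /* Schematically (with illustrative registers),

	(parallel [(set (reg:SI 100) (plus:SI (reg:SI 102) (reg:SI 103)))
		   (set (reg:SI 101) (minus:SI (reg:SI 102) (reg:SI 103)))])

     qualifies because neither SET references the other's destination, so
     the two SETs can simply become separate I2 and I3 insns.  */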
3520 
3521   else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3522 	   && GET_CODE (newpat) == PARALLEL
3523 	   && XVECLEN (newpat, 0) == 2
3524 	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3525 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
3526 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
3527 	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3528 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3529 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
3530 	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3531 				   DF_INSN_LUID (i2))
3532 	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3533 				  XVECEXP (newpat, 0, 0))
3534 	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
3535 				  XVECEXP (newpat, 0, 1))
3536 	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
3537 		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))
3538 #ifdef HAVE_cc0
3539 	   /* We cannot split the parallel into two sets if both sets
3540 	      reference cc0.  */
3541 	   && ! (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
3542 		 && reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1)))
3543 #endif
3544 	   )
3545     {
3546       /* Normally, it doesn't matter which of the two is done first,
3547 	 but it does if one references cc0.  In that case, it has to
3548 	 be first.  */
3549 #ifdef HAVE_cc0
3550       if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
3551 	{
3552 	  if (use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 0)),
3553 				 DF_INSN_LUID (i2)))
3554 	    {
3555 	      undo_all ();
3556 	      return 0;
3557 	    }
3558 	  newi2pat = XVECEXP (newpat, 0, 0);
3559 	  newpat = XVECEXP (newpat, 0, 1);
3560 	}
3561       else
3562 #endif
3563 	{
3564 	  newi2pat = XVECEXP (newpat, 0, 1);
3565 	  newpat = XVECEXP (newpat, 0, 0);
3566 	}
3567 
3568       i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3569 
3570       if (i2_code_number >= 0)
3571 	{
3572 	  /* recog_for_combine might have added CLOBBERs to newi2pat.
3573 	     Make sure NEWPAT does not depend on the clobbered regs.  */
3574 	  if (GET_CODE (newi2pat) == PARALLEL)
3575 	    {
3576 	      for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
3577 		if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
3578 		  {
3579 		    rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
3580 		    if (reg_overlap_mentioned_p (reg, newpat))
3581 		      {
3582 			undo_all ();
3583 			return 0;
3584 		      }
3585 		  }
3586 	    }
3587 
3588 	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3589 	}
3590     }
3591 
3592   /* If it still isn't recognized, fail and change things back the way they
3593      were.  */
3594   if ((insn_code_number < 0
3595        /* Is the result a reasonable ASM_OPERANDS?  */
3596        && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
3597     {
3598       undo_all ();
3599       return 0;
3600     }
3601 
3602   /* If we had to change another insn, make sure it is valid also.  */
3603   if (undobuf.other_insn)
3604     {
3605       CLEAR_HARD_REG_SET (newpat_used_regs);
3606 
3607       other_pat = PATTERN (undobuf.other_insn);
3608       other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
3609 					     &new_other_notes);
3610 
3611       if (other_code_number < 0 && ! check_asm_operands (other_pat))
3612 	{
3613 	  undo_all ();
3614 	  return 0;
3615 	}
3616     }
3617 
3618 #ifdef HAVE_cc0
3619   /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
3620      they are adjacent to each other or not.  */
3621   {
3622     rtx p = prev_nonnote_insn (i3);
3623     if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
3624 	&& sets_cc0_p (newi2pat))
3625       {
3626 	undo_all ();
3627 	return 0;
3628       }
3629   }
3630 #endif
3631 
3632   /* Only allow this combination if insn_rtx_costs reports that the
3633      replacement instructions are cheaper than the originals.  */
3634   if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat, other_pat))
3635     {
3636       undo_all ();
3637       return 0;
3638     }
3639 
3640   if (MAY_HAVE_DEBUG_INSNS)
3641     {
3642       struct undo *undo;
3643 
3644       for (undo = undobuf.undos; undo; undo = undo->next)
3645 	if (undo->kind == UNDO_MODE)
3646 	  {
3647 	    rtx reg = *undo->where.r;
3648 	    enum machine_mode new_mode = GET_MODE (reg);
3649 	    enum machine_mode old_mode = undo->old_contents.m;
3650 
3651 	    /* Temporarily revert mode back.  */
3652 	    adjust_reg_mode (reg, old_mode);
3653 
3654 	    if (reg == i2dest && i2scratch)
3655 	      {
3656 		/* If we used i2dest as a scratch register with a
3657 		   different mode, substitute it for the original
3658 		   i2src while its original mode is temporarily
3659 		   restored, and then clear i2scratch so that we don't
3660 		   do it again later.  */
3661 		propagate_for_debug (i2, i3, reg, i2src, false);
3662 		i2scratch = false;
3663 		/* Put back the new mode.  */
3664 		adjust_reg_mode (reg, new_mode);
3665 	      }
3666 	    else
3667 	      {
3668 		rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
3669 		rtx first, last;
3670 
3671 		if (reg == i2dest)
3672 		  {
3673 		    first = i2;
3674 		    last = i3;
3675 		  }
3676 		else
3677 		  {
3678 		    first = i3;
3679 		    last = undobuf.other_insn;
3680 		    gcc_assert (last);
3681 		  }
3682 
3683 		/* We're dealing with a reg that changed mode but not
3684 		   meaning, so we want to turn it into a subreg for
3685 		   the new mode.  However, because of REG sharing and
3686 		   because its mode had already changed, we have to do
3687 		   it in two steps.  First, replace any debug uses of
3688 		   reg, with its original mode temporarily restored,
3689 		   with this copy we have created; then, replace the
3690 		   copy with the SUBREG of the original shared reg,
3691 		   once again changed to the new mode.  */
3692 		propagate_for_debug (first, last, reg, tempreg, false);
3693 		adjust_reg_mode (reg, new_mode);
3694 		propagate_for_debug (first, last, tempreg,
3695 				     lowpart_subreg (old_mode, reg, new_mode),
3696 				     false);
3697 	      }
3698 	  }
3699     }
3700 
3701   /* If we will be able to accept this, we have made a
3702      change to the destination of I3.  This requires us to
3703      do a few adjustments.  */
3704 
3705   if (changed_i3_dest)
3706     {
3707       PATTERN (i3) = newpat;
3708       adjust_for_new_dest (i3);
3709     }
3710 
3711   /* We now know that we can do this combination.  Merge the insns and
3712      update the status of registers and LOG_LINKS.  */
3713 
3714   if (undobuf.other_insn)
3715     {
3716       rtx note, next;
3717 
3718       PATTERN (undobuf.other_insn) = other_pat;
3719 
3720       /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
3721 	 are still valid.  Then add any non-duplicate notes added by
3722 	 recog_for_combine.  */
3723       for (note = REG_NOTES (undobuf.other_insn); note; note = next)
3724 	{
3725 	  next = XEXP (note, 1);
3726 
3727 	  if (REG_NOTE_KIND (note) == REG_UNUSED
3728 	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
3729 	    remove_note (undobuf.other_insn, note);
3730 	}
3731 
3732       distribute_notes (new_other_notes, undobuf.other_insn,
3733 			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
3734     }
3735 
3736   if (swap_i2i3)
3737     {
3738       rtx insn;
3739       rtx link;
3740       rtx ni2dest;
3741 
3742       /* I3 now uses what used to be its destination and which is now
3743 	 I2's destination.  This requires us to do a few adjustments.  */
3744       PATTERN (i3) = newpat;
3745       adjust_for_new_dest (i3);
3746 
3747       /* We need a LOG_LINK from I3 to I2.  But we used to have one,
3748 	 so we still will.
3749 
3750 	 However, some later insn might be using I2's dest and have
3751 	 a LOG_LINK pointing at I3.  We must remove this link.
3752 	 The simplest way to remove the link is to point it at I1,
3753 	 which we know will be a NOTE.  */
3754 
3755       /* newi2pat is usually a SET here; however, recog_for_combine might
3756 	 have added some clobbers.  */
3757       if (GET_CODE (newi2pat) == PARALLEL)
3758 	ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
3759       else
3760 	ni2dest = SET_DEST (newi2pat);
3761 
3762       for (insn = NEXT_INSN (i3);
3763 	   insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
3764 		    || insn != BB_HEAD (this_basic_block->next_bb));
3765 	   insn = NEXT_INSN (insn))
3766 	{
3767 	  if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
3768 	    {
3769 	      for (link = LOG_LINKS (insn); link;
3770 		   link = XEXP (link, 1))
3771 		if (XEXP (link, 0) == i3)
3772 		  XEXP (link, 0) = i1;
3773 
3774 	      break;
3775 	    }
3776 	}
3777     }
3778 
3779   {
3780     rtx i3notes, i2notes, i1notes = 0;
3781     rtx i3links, i2links, i1links = 0;
3782     rtx midnotes = 0;
3783     unsigned int regno;
3784     /* Compute which registers we expect to eliminate.  newi2pat may be setting
3785        either i3dest or i2dest, so we must check it.  Also, i1dest may be the
3786        same as i3dest, in which case newi2pat may be setting i1dest.  */
3787     rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
3788 		   || i2dest_in_i2src || i2dest_in_i1src
3789 		   || !i2dest_killed
3790 		   ? 0 : i2dest);
3791     rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
3792 		   || (newi2pat && reg_set_p (i1dest, newi2pat))
3793 		   || !i1dest_killed
3794 		   ? 0 : i1dest);
3795 
3796     /* Get the old REG_NOTES and LOG_LINKS from all our insns and
3797        clear them.  */
3798     i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
3799     i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
3800     if (i1)
3801       i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
3802 
3803     /* Ensure that we do not have something that should not be shared but
3804        occurs multiple times in the new insns.  Check this by first
3805        resetting all the `used' flags and then copying anything that is shared.  */
3806 
3807     reset_used_flags (i3notes);
3808     reset_used_flags (i2notes);
3809     reset_used_flags (i1notes);
3810     reset_used_flags (newpat);
3811     reset_used_flags (newi2pat);
3812     if (undobuf.other_insn)
3813       reset_used_flags (PATTERN (undobuf.other_insn));
3814 
3815     i3notes = copy_rtx_if_shared (i3notes);
3816     i2notes = copy_rtx_if_shared (i2notes);
3817     i1notes = copy_rtx_if_shared (i1notes);
3818     newpat = copy_rtx_if_shared (newpat);
3819     newi2pat = copy_rtx_if_shared (newi2pat);
3820     if (undobuf.other_insn)
3821       PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
3822 
3823     INSN_CODE (i3) = insn_code_number;
3824     PATTERN (i3) = newpat;
3825 
3826     if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
3827       {
3828 	rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
3829 
3830 	reset_used_flags (call_usage);
3831 	call_usage = copy_rtx (call_usage);
3832 
3833 	if (substed_i2)
3834 	  replace_rtx (call_usage, i2dest, i2src);
3835 
3836 	if (substed_i1)
3837 	  replace_rtx (call_usage, i1dest, i1src);
3838 
3839 	CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
3840       }
3841 
3842     if (undobuf.other_insn)
3843       INSN_CODE (undobuf.other_insn) = other_code_number;
3844 
3845     /* We had one special case above where I2 had more than one set and
3846        we replaced a destination of one of those sets with the destination
3847        of I3.  In that case, we have to update LOG_LINKS of insns later
3848        in this basic block.  Note that this (expensive) case is rare.
3849 
3850        Also, in this case, we must pretend that all REG_NOTEs for I2
3851        actually came from I3, so that REG_UNUSED notes from I2 will be
3852        properly handled.  */
3853 
3854     if (i3_subst_into_i2)
3855       {
3856 	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
3857 	  if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
3858 	       || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
3859 	      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
3860 	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
3861 	      && ! find_reg_note (i2, REG_UNUSED,
3862 				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
3863 	    for (temp = NEXT_INSN (i2);
3864 		 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
3865 			  || BB_HEAD (this_basic_block) != temp);
3866 		 temp = NEXT_INSN (temp))
3867 	      if (temp != i3 && INSN_P (temp))
3868 		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
3869 		  if (XEXP (link, 0) == i2)
3870 		    XEXP (link, 0) = i3;
3871 
3872 	if (i3notes)
3873 	  {
3874 	    rtx link = i3notes;
3875 	    while (XEXP (link, 1))
3876 	      link = XEXP (link, 1);
3877 	    XEXP (link, 1) = i2notes;
3878 	  }
3879 	else
3880 	  i3notes = i2notes;
3881 	i2notes = 0;
3882       }
3883 
3884     LOG_LINKS (i3) = 0;
3885     REG_NOTES (i3) = 0;
3886     LOG_LINKS (i2) = 0;
3887     REG_NOTES (i2) = 0;
3888 
3889     if (newi2pat)
3890       {
3891 	if (MAY_HAVE_DEBUG_INSNS && i2scratch)
3892 	  propagate_for_debug (i2, i3, i2dest, i2src, false);
3893 	INSN_CODE (i2) = i2_code_number;
3894 	PATTERN (i2) = newi2pat;
3895       }
3896     else
3897       {
3898 	if (MAY_HAVE_DEBUG_INSNS && i2src)
3899 	  propagate_for_debug (i2, i3, i2dest, i2src, i3_subst_into_i2);
3900 	SET_INSN_DELETED (i2);
3901       }
3902 
3903     if (i1)
3904       {
3905 	LOG_LINKS (i1) = 0;
3906 	REG_NOTES (i1) = 0;
3907 	if (MAY_HAVE_DEBUG_INSNS)
3908 	  propagate_for_debug (i1, i3, i1dest, i1src, false);
3909 	SET_INSN_DELETED (i1);
3910       }
3911 
3912     /* Get death notes for everything that is now used in either I3 or
3913        I2 and used to die in a previous insn.  If we built two new
3914        patterns, move from I1 to I2 then I2 to I3 so that we get the
3915        proper movement on registers that I2 modifies.  */
3916 
3917     if (newi2pat)
3918       {
3919 	move_deaths (newi2pat, NULL_RTX, DF_INSN_LUID (i1), i2, &midnotes);
3920 	move_deaths (newpat, newi2pat, DF_INSN_LUID (i1), i3, &midnotes);
3921       }
3922     else
3923       move_deaths (newpat, NULL_RTX, i1 ? DF_INSN_LUID (i1) : DF_INSN_LUID (i2),
3924 		   i3, &midnotes);
3925 
3926     /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
3927     if (i3notes)
3928       distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
3929 			elim_i2, elim_i1);
3930     if (i2notes)
3931       distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
3932 			elim_i2, elim_i1);
3933     if (i1notes)
3934       distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
3935 			elim_i2, elim_i1);
3936     if (midnotes)
3937       distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3938 			elim_i2, elim_i1);
3939 
3940     /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
3941        know these are REG_UNUSED and want them to go to the desired insn,
3942        so we always pass it as i3.  */
3943 
3944     if (newi2pat && new_i2_notes)
3945       distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3946 
3947     if (new_i3_notes)
3948       distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
3949 
3950     /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
3951        put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
3952        I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
3953        in that case, it might delete I2.  Similarly for I2 and I1.
3954        Show an additional death due to the REG_DEAD note we make here.  If
3955        we discard it in distribute_notes, we will decrement it again.  */
3956 
3957     if (i3dest_killed)
3958       {
3959 	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
3960 	  distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
3961 					    NULL_RTX),
3962 			    NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
3963 	else
3964 	  distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
3965 					    NULL_RTX),
3966 			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3967 			    elim_i2, elim_i1);
3968       }
3969 
3970     if (i2dest_in_i2src)
3971       {
3972 	if (newi2pat && reg_set_p (i2dest, newi2pat))
3973 	  distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
3974 			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3975 	else
3976 	  distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
3977 			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3978 			    NULL_RTX, NULL_RTX);
3979       }
3980 
3981     if (i1dest_in_i1src)
3982       {
3983 	if (newi2pat && reg_set_p (i1dest, newi2pat))
3984 	  distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
3985 			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3986 	else
3987 	  distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
3988 			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3989 			    NULL_RTX, NULL_RTX);
3990       }
3991 
3992     distribute_links (i3links);
3993     distribute_links (i2links);
3994     distribute_links (i1links);
3995 
3996     if (REG_P (i2dest))
3997       {
3998 	rtx link;
3999 	rtx i2_insn = 0, i2_val = 0, set;
4000 
4001 	/* The insn that used to set this register doesn't exist, and
4002 	   this life of the register may not exist either.  See if one of
4003 	   I3's links points to an insn that sets I2DEST.  If it does,
4004 	   that is now the last known value for I2DEST.  If we don't update
4005 	   this and I2 set the register to a value that depended on its old
4006 	   contents, we will get confused.  If this insn is used, things
4007 	   will be set correctly in combine_instructions.  */
4008 
4009 	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
4010 	  if ((set = single_set (XEXP (link, 0))) != 0
4011 	      && rtx_equal_p (i2dest, SET_DEST (set)))
4012 	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
4013 
4014 	record_value_for_reg (i2dest, i2_insn, i2_val);
4015 
4016 	/* If the reg formerly set in I2 died only once and that was in I3,
4017 	   zero its use count so it won't make `reload' do any work.  */
4018 	if (! added_sets_2
4019 	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
4020 	    && ! i2dest_in_i2src)
4021 	  {
4022 	    regno = REGNO (i2dest);
4023 	    INC_REG_N_SETS (regno, -1);
4024 	  }
4025       }
4026 
4027     if (i1 && REG_P (i1dest))
4028       {
4029 	rtx link;
4030 	rtx i1_insn = 0, i1_val = 0, set;
4031 
4032 	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
4033 	  if ((set = single_set (XEXP (link, 0))) != 0
4034 	      && rtx_equal_p (i1dest, SET_DEST (set)))
4035 	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
4036 
4037 	record_value_for_reg (i1dest, i1_insn, i1_val);
4038 
4039 	regno = REGNO (i1dest);
4040 	if (! added_sets_1 && ! i1dest_in_i1src)
4041 	  INC_REG_N_SETS (regno, -1);
4042       }
4043 
4044     /* Update reg_stat[].nonzero_bits et al for any changes that may have
4045        been made to this insn.  The order of the calls to
4046        set_nonzero_bits_and_sign_copies() is important, because newi2pat
4047        can affect the nonzero_bits of newpat.  */
4048     if (newi2pat)
4049       note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
4050     note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
4051   }
4052 
4053   if (undobuf.other_insn != NULL_RTX)
4054     {
4055       if (dump_file)
4056 	{
4057 	  fprintf (dump_file, "modifying other_insn ");
4058 	  dump_insn_slim (dump_file, undobuf.other_insn);
4059 	}
4060       df_insn_rescan (undobuf.other_insn);
4061     }
4062 
4063   if (i1 && !(NOTE_P(i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
4064     {
4065       if (dump_file)
4066 	{
4067 	  fprintf (dump_file, "modifying insn i1 ");
4068 	  dump_insn_slim (dump_file, i1);
4069 	}
4070       df_insn_rescan (i1);
4071     }
4072 
4073   if (i2 && !(NOTE_P(i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
4074     {
4075       if (dump_file)
4076 	{
4077 	  fprintf (dump_file, "modifying insn i2 ");
4078 	  dump_insn_slim (dump_file, i2);
4079 	}
4080       df_insn_rescan (i2);
4081     }
4082 
4083   if (i3 && !(NOTE_P(i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
4084     {
4085       if (dump_file)
4086 	{
4087 	  fprintf (dump_file, "modifying insn i3 ");
4088 	  dump_insn_slim (dump_file, i3);
4089 	}
4090       df_insn_rescan (i3);
4091     }
4092 
4093   /* Set new_direct_jump_p if a new return or simple jump instruction
4094      has been created.  Adjust the CFG accordingly.  */
4095 
4096   if (returnjump_p (i3) || any_uncondjump_p (i3))
4097     {
4098       *new_direct_jump_p = 1;
4099       mark_jump_label (PATTERN (i3), i3, 0);
4100       update_cfg_for_uncondjump (i3);
4101     }
4102 
4103   if (undobuf.other_insn != NULL_RTX
4104       && (returnjump_p (undobuf.other_insn)
4105 	  || any_uncondjump_p (undobuf.other_insn)))
4106     {
4107       *new_direct_jump_p = 1;
4108       update_cfg_for_uncondjump (undobuf.other_insn);
4109     }
4110 
4111   /* A noop might also need cleaning up of CFG, if it comes from the
4112      simplification of a jump.  */
4113   if (GET_CODE (newpat) == SET
4114       && SET_SRC (newpat) == pc_rtx
4115       && SET_DEST (newpat) == pc_rtx)
4116     {
4117       *new_direct_jump_p = 1;
4118       update_cfg_for_uncondjump (i3);
4119     }
4120 
4121   combine_successes++;
4122   undo_commit ();
4123 
4124   if (added_links_insn
4125       && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2))
4126       && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i3))
4127     return added_links_insn;
4128   else
4129     return newi2pat ? i2 : i3;
4130 }
4131 
4132 /* Undo all the modifications recorded in undobuf.  */
4133 
4134 static void
4135 undo_all (void)
4136 {
4137   struct undo *undo, *next;
4138 
4139   for (undo = undobuf.undos; undo; undo = next)
4140     {
4141       next = undo->next;
4142       switch (undo->kind)
4143 	{
4144 	case UNDO_RTX:
4145 	  *undo->where.r = undo->old_contents.r;
4146 	  break;
4147 	case UNDO_INT:
4148 	  *undo->where.i = undo->old_contents.i;
4149 	  break;
4150 	case UNDO_MODE:
4151 	  adjust_reg_mode (*undo->where.r, undo->old_contents.m);
4152 	  break;
4153 	default:
4154 	  gcc_unreachable ();
4155 	}
4156 
4157       undo->next = undobuf.frees;
4158       undobuf.frees = undo;
4159     }
4160 
4161   undobuf.undos = 0;
4162 }
4163 
4164 /* We've committed to accepting the changes we made.  Move all
4165    of the undos to the free list.  */
4166 
4167 static void
4168 undo_commit (void)
4169 {
4170   struct undo *undo, *next;
4171 
4172   for (undo = undobuf.undos; undo; undo = next)
4173     {
4174       next = undo->next;
4175       undo->next = undobuf.frees;
4176       undobuf.frees = undo;
4177     }
4178   undobuf.undos = 0;
4179 }
4180 
4181 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
4182    where we have an arithmetic expression and return that point.  LOC will
4183    be inside INSN.
4184 
4185    try_combine will call this function to see if an insn can be split into
4186    two insns.  */
4187 
4188 static rtx *
4189 find_split_point (rtx *loc, rtx insn)
4190 {
4191   rtx x = *loc;
4192   enum rtx_code code = GET_CODE (x);
4193   rtx *split;
4194   unsigned HOST_WIDE_INT len = 0;
4195   HOST_WIDE_INT pos = 0;
4196   int unsignedp = 0;
4197   rtx inner = NULL_RTX;
4198 
4199   /* First special-case some codes.  */
4200   switch (code)
4201     {
4202     case SUBREG:
4203 #ifdef INSN_SCHEDULING
4204       /* If we are making a paradoxical SUBREG invalid, it becomes a split
4205 	 point.  */
4206       if (MEM_P (SUBREG_REG (x)))
4207 	return loc;
4208 #endif
4209       return find_split_point (&SUBREG_REG (x), insn);
4210 
4211     case MEM:
4212 #ifdef HAVE_lo_sum
4213       /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
4214 	 using LO_SUM and HIGH.  */
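      /* For example, (mem:SI (symbol_ref:SI ("x"))) can be rewritten as

	     (mem:SI (lo_sum:SI (high:SI (symbol_ref:SI ("x")))
				(symbol_ref:SI ("x"))))

	 and the HIGH part is then returned as the split point.  */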
4215       if (GET_CODE (XEXP (x, 0)) == CONST
4216 	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
4217 	{
4218 	  enum machine_mode address_mode
4219 	    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
4220 
4221 	  SUBST (XEXP (x, 0),
4222 		 gen_rtx_LO_SUM (address_mode,
4223 				 gen_rtx_HIGH (address_mode, XEXP (x, 0)),
4224 				 XEXP (x, 0)));
4225 	  return &XEXP (XEXP (x, 0), 0);
4226 	}
4227 #endif
4228 
4229       /* If we have a PLUS whose second operand is a constant and the
4230 	 address is not valid, perhaps we can split it up using
4231 	 the machine-specific way to split large constants.  We use
4232 	 the first pseudo-reg (one of the virtual regs) as a placeholder;
4233 	 it will not remain in the result.  */
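      /* Sketch of the idea, with purely illustrative constants: for an
	 invalid address (plus:SI (reg:SI 101) (const_int 0x12340)), we ask
	 the backend to split (set (placeholder) (plus ...)), hoping for

	     (set (placeholder) (plus:SI (reg:SI 101) (const_int 0x12000)))
	     (set (placeholder) (plus:SI (placeholder) (const_int 0x340)))

	 whose two sources are then recombined below.  */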
4234       if (GET_CODE (XEXP (x, 0)) == PLUS
4235 	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
4236 	  && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4237 					    MEM_ADDR_SPACE (x)))
4238 	{
4239 	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
4240 	  rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
4241 						      XEXP (x, 0)),
4242 					 subst_insn);
4243 
4244 	  /* This should have produced two insns, each of which sets our
4245 	     placeholder.  If the source of the second is a valid address,
4246 	     we can put both sources together and make a split point
4247 	     in the middle.  */
4248 
4249 	  if (seq
4250 	      && NEXT_INSN (seq) != NULL_RTX
4251 	      && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
4252 	      && NONJUMP_INSN_P (seq)
4253 	      && GET_CODE (PATTERN (seq)) == SET
4254 	      && SET_DEST (PATTERN (seq)) == reg
4255 	      && ! reg_mentioned_p (reg,
4256 				    SET_SRC (PATTERN (seq)))
4257 	      && NONJUMP_INSN_P (NEXT_INSN (seq))
4258 	      && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
4259 	      && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
4260 	      && memory_address_addr_space_p
4261 		   (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
4262 		    MEM_ADDR_SPACE (x)))
4263 	    {
4264 	      rtx src1 = SET_SRC (PATTERN (seq));
4265 	      rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
4266 
4267 	      /* Replace the placeholder in SRC2 with SRC1.  If we can
4268 		 find where in SRC2 it was placed, that can become our
4269 		 split point and we can replace this address with SRC2.
4270 		 Just try two obvious places.  */
4271 
4272 	      src2 = replace_rtx (src2, reg, src1);
4273 	      split = 0;
4274 	      if (XEXP (src2, 0) == src1)
4275 		split = &XEXP (src2, 0);
4276 	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
4277 		       && XEXP (XEXP (src2, 0), 0) == src1)
4278 		split = &XEXP (XEXP (src2, 0), 0);
4279 
4280 	      if (split)
4281 		{
4282 		  SUBST (XEXP (x, 0), src2);
4283 		  return split;
4284 		}
4285 	    }
4286 
4287 	  /* If that didn't work, perhaps the first operand is complex and
4288 	     needs to be computed separately, so make a split point there.
4289 	     This will occur on machines that just support REG + CONST
4290 	     and have a constant moved through some previous computation.  */
4291 
4292 	  else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
4293 		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
4294 			 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
4295 	    return &XEXP (XEXP (x, 0), 0);
4296 	}
4297 
4298       /* If we have a PLUS whose first operand is complex, try computing it
4299          separately by making a split there.  */
4300       if (GET_CODE (XEXP (x, 0)) == PLUS
4301           && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4302 					    MEM_ADDR_SPACE (x))
4303           && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
4304           && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
4305                 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
4306         return &XEXP (XEXP (x, 0), 0);
4307       break;
4308 
4309     case SET:
4310 #ifdef HAVE_cc0
4311       /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
4312 	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
4313 	 we need to put the operand into a register.  So split at that
4314 	 point.  */
4315 
4316       if (SET_DEST (x) == cc0_rtx
4317 	  && GET_CODE (SET_SRC (x)) != COMPARE
4318 	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
4319 	  && !OBJECT_P (SET_SRC (x))
4320 	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
4321 		&& OBJECT_P (SUBREG_REG (SET_SRC (x)))))
4322 	return &SET_SRC (x);
4323 #endif
4324 
4325       /* See if we can split SET_SRC as it stands.  */
4326       split = find_split_point (&SET_SRC (x), insn);
4327       if (split && split != &SET_SRC (x))
4328 	return split;
4329 
4330       /* See if we can split SET_DEST as it stands.  */
4331       split = find_split_point (&SET_DEST (x), insn);
4332       if (split && split != &SET_DEST (x))
4333 	return split;
4334 
4335       /* See if this is a bitfield assignment with everything constant.  If
4336 	 so, this is an IOR of an AND, so split it into that.  */
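      /* For instance, storing 5 into a 4-bit field at bit 8 of (reg:SI 100)
	 (all values illustrative):

	     (set (zero_extract:SI (reg:SI 100) (const_int 4) (const_int 8))
		  (const_int 5))

	 becomes

	     (set (reg:SI 100)
		  (ior:SI (and:SI (reg:SI 100) (const_int -3841))
			  (const_int 1280)))

	 since ~(15 << 8) == -3841 and 5 << 8 == 1280.  */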
4337       if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4338 	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
4339 	      <= HOST_BITS_PER_WIDE_INT)
4340 	  && CONST_INT_P (XEXP (SET_DEST (x), 1))
4341 	  && CONST_INT_P (XEXP (SET_DEST (x), 2))
4342 	  && CONST_INT_P (SET_SRC (x))
4343 	  && ((INTVAL (XEXP (SET_DEST (x), 1))
4344 	       + INTVAL (XEXP (SET_DEST (x), 2)))
4345 	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
4346 	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
4347 	{
4348 	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
4349 	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
4350 	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
4351 	  rtx dest = XEXP (SET_DEST (x), 0);
4352 	  enum machine_mode mode = GET_MODE (dest);
4353 	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
4354 	  rtx or_mask;
4355 
4356 	  if (BITS_BIG_ENDIAN)
4357 	    pos = GET_MODE_BITSIZE (mode) - len - pos;
4358 
4359 	  or_mask = gen_int_mode (src << pos, mode);
4360 	  if (src == mask)
4361 	    SUBST (SET_SRC (x),
4362 		   simplify_gen_binary (IOR, mode, dest, or_mask));
4363 	  else
4364 	    {
4365 	      rtx negmask = gen_int_mode (~(mask << pos), mode);
4366 	      SUBST (SET_SRC (x),
4367 		     simplify_gen_binary (IOR, mode,
4368 					  simplify_gen_binary (AND, mode,
4369 							       dest, negmask),
4370 					  or_mask));
4371 	    }
4372 
4373 	  SUBST (SET_DEST (x), dest);
4374 
4375 	  split = find_split_point (&SET_SRC (x), insn);
4376 	  if (split && split != &SET_SRC (x))
4377 	    return split;
4378 	}
4379 
4380       /* Otherwise, see if this is an operation that we can split into two.
4381 	 If so, try to split that.  */
4382       code = GET_CODE (SET_SRC (x));
4383 
4384       switch (code)
4385 	{
4386 	case AND:
4387 	  /* If we are AND'ing with a large constant that is only a single
4388 	     bit and the result is only being used in a context where we
4389 	     need to know if it is zero or nonzero, replace it with a bit
4390 	     extraction.  This will avoid the large constant, which might
4391 	     have taken more than one insn to make.  If the constant were
4392 	     not a valid argument to the AND but took only one insn to make,
4393 	     this is no worse, but if it took more than one insn, it will
4394 	     be better.  */
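	  /* E.g. (set (reg:SI 100) (and:SI (reg:SI 101) (const_int 4096)))
	     whose destination is only compared against zero can instead use
	     a one-bit ZERO_EXTRACT at bit 12, avoiding the constant 4096;
	     the numbers here are illustrative.  */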
4395 
4396 	  if (CONST_INT_P (XEXP (SET_SRC (x), 1))
4397 	      && REG_P (XEXP (SET_SRC (x), 0))
4398 	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
4399 	      && REG_P (SET_DEST (x))
4400 	      && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
4401 	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
4402 	      && XEXP (*split, 0) == SET_DEST (x)
4403 	      && XEXP (*split, 1) == const0_rtx)
4404 	    {
4405 	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
4406 						XEXP (SET_SRC (x), 0),
4407 						pos, NULL_RTX, 1, 1, 0, 0);
4408 	      if (extraction != 0)
4409 		{
4410 		  SUBST (SET_SRC (x), extraction);
4411 		  return find_split_point (loc, insn);
4412 		}
4413 	    }
4414 	  break;
4415 
4416 	case NE:
4417 	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
4418 	     can possibly be nonzero, so this becomes a NEG of a shift.  */
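	  /* Illustration: if nonzero_bits says only bit 7 of (reg:SI 100)
	     can be set, (ne:SI (reg:SI 100) (const_int 0)) becomes
	     (neg:SI (lshiftrt:SI (reg:SI 100) (const_int 7))), which is
	     0 or -1 exactly as STORE_FLAG_VALUE == -1 requires.  */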
4419 	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
4420 	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
4421 	      && 1 <= (pos = exact_log2
4422 		       (nonzero_bits (XEXP (SET_SRC (x), 0),
4423 				      GET_MODE (XEXP (SET_SRC (x), 0))))))
4424 	    {
4425 	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
4426 
4427 	      SUBST (SET_SRC (x),
4428 		     gen_rtx_NEG (mode,
4429 				  gen_rtx_LSHIFTRT (mode,
4430 						    XEXP (SET_SRC (x), 0),
4431 						    GEN_INT (pos))));
4432 
4433 	      split = find_split_point (&SET_SRC (x), insn);
4434 	      if (split && split != &SET_SRC (x))
4435 		return split;
4436 	    }
4437 	  break;
4438 
4439 	case SIGN_EXTEND:
4440 	  inner = XEXP (SET_SRC (x), 0);
4441 
4442 	  /* We can't optimize if either mode is a partial integer
4443 	     mode as we don't know how many bits are significant
4444 	     in those modes.  */
4445 	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
4446 	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
4447 	    break;
4448 
4449 	  pos = 0;
4450 	  len = GET_MODE_BITSIZE (GET_MODE (inner));
4451 	  unsignedp = 0;
4452 	  break;
4453 
4454 	case SIGN_EXTRACT:
4455 	case ZERO_EXTRACT:
4456 	  if (CONST_INT_P (XEXP (SET_SRC (x), 1))
4457 	      && CONST_INT_P (XEXP (SET_SRC (x), 2)))
4458 	    {
4459 	      inner = XEXP (SET_SRC (x), 0);
4460 	      len = INTVAL (XEXP (SET_SRC (x), 1));
4461 	      pos = INTVAL (XEXP (SET_SRC (x), 2));
4462 
4463 	      if (BITS_BIG_ENDIAN)
4464 		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
4465 	      unsignedp = (code == ZERO_EXTRACT);
4466 	    }
4467 	  break;
4468 
4469 	default:
4470 	  break;
4471 	}
4472 
4473       if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
4474 	{
4475 	  enum machine_mode mode = GET_MODE (SET_SRC (x));
4476 
4477 	  /* For unsigned, we have a choice of a shift followed by an
4478 	     AND or two shifts.  Use two shifts for field sizes where the
4479 	     constant might be too large.  We assume here that we can
4480 	     always at least get 8-bit constants in an AND insn, which is
4481 	     true for every current RISC.  */
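	  /* Concretely, for an 8-bit unsigned field at bit 4 of SImode X
	     (illustrative numbers), the AND form is
	     (and:SI (lshiftrt:SI X (const_int 4)) (const_int 255)) and the
	     two-shift form used for wider fields is
	     (lshiftrt:SI (ashift:SI X (const_int 20)) (const_int 24)).  */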
4482 
4483 	  if (unsignedp && len <= 8)
4484 	    {
4485 	      SUBST (SET_SRC (x),
4486 		     gen_rtx_AND (mode,
4487 				  gen_rtx_LSHIFTRT
4488 				  (mode, gen_lowpart (mode, inner),
4489 				   GEN_INT (pos)),
4490 				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
4491 
4492 	      split = find_split_point (&SET_SRC (x), insn);
4493 	      if (split && split != &SET_SRC (x))
4494 		return split;
4495 	    }
4496 	  else
4497 	    {
4498 	      SUBST (SET_SRC (x),
4499 		     gen_rtx_fmt_ee
4500 		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
4501 		      gen_rtx_ASHIFT (mode,
4502 				      gen_lowpart (mode, inner),
4503 				      GEN_INT (GET_MODE_BITSIZE (mode)
4504 					       - len - pos)),
4505 		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
4506 
4507 	      split = find_split_point (&SET_SRC (x), insn);
4508 	      if (split && split != &SET_SRC (x))
4509 		return split;
4510 	    }
4511 	}
4512 
4513       /* See if this is a simple operation with a constant as the second
4514 	 operand.  It might be that this constant is out of range and hence
4515 	 could be used as a split point.  */
4516       if (BINARY_P (SET_SRC (x))
4517 	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
4518 	  && (OBJECT_P (XEXP (SET_SRC (x), 0))
4519 	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
4520 		  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
4521 	return &XEXP (SET_SRC (x), 1);
4522 
4523       /* Finally, see if this is a simple operation with its first operand
4524 	 not in a register.  The operation might require this operand in a
4525 	 register, so return it as a split point.  We can always do this
4526 	 because if the first operand were another operation, we would have
4527 	 already found it as a split point.  */
4528       if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
4529 	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
4530 	return &XEXP (SET_SRC (x), 0);
4531 
4532       return 0;
4533 
4534     case AND:
4535     case IOR:
4536       /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
4537 	 it is better to write this as (not (ior A B)) so we can split it.
4538 	 Similarly for IOR.  */
4539       if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
4540 	{
4541 	  SUBST (*loc,
4542 		 gen_rtx_NOT (GET_MODE (x),
4543 			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
4544 					      GET_MODE (x),
4545 					      XEXP (XEXP (x, 0), 0),
4546 					      XEXP (XEXP (x, 1), 0))));
4547 	  return find_split_point (loc, insn);
4548 	}
4549 
4550       /* Many RISC machines have a large set of logical insns.  If the
4551 	 second operand is a NOT, put it first so we will try to split the
4552 	 other operand first.  */
4553       if (GET_CODE (XEXP (x, 1)) == NOT)
4554 	{
4555 	  rtx tem = XEXP (x, 0);
4556 	  SUBST (XEXP (x, 0), XEXP (x, 1));
4557 	  SUBST (XEXP (x, 1), tem);
4558 	}
4559       break;
4560 
4561     default:
4562       break;
4563     }
4564 
4565   /* Otherwise, select our actions depending on our rtx class.  */
4566   switch (GET_RTX_CLASS (code))
4567     {
4568     case RTX_BITFIELD_OPS:		/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
4569     case RTX_TERNARY:
4570       split = find_split_point (&XEXP (x, 2), insn);
4571       if (split)
4572 	return split;
4573       /* ... fall through ...  */
4574     case RTX_BIN_ARITH:
4575     case RTX_COMM_ARITH:
4576     case RTX_COMPARE:
4577     case RTX_COMM_COMPARE:
4578       split = find_split_point (&XEXP (x, 1), insn);
4579       if (split)
4580 	return split;
4581       /* ... fall through ...  */
4582     case RTX_UNARY:
4583       /* Some machines have (and (shift ...) ...) insns.  If X is not
4584 	 an AND, but XEXP (X, 0) is, use it as our split point.  */
4585       if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
4586 	return &XEXP (x, 0);
4587 
4588       split = find_split_point (&XEXP (x, 0), insn);
4589       if (split)
4590 	return split;
4591       return loc;
4592 
4593     default:
4594       /* Otherwise, we don't have a split point.  */
4595       return 0;
4596     }
4597 }
4598 
4599 /* Throughout X, replace FROM with TO, and return the result.
4600    The result is TO if X is FROM;
4601    otherwise the result is X, but its contents may have been modified.
4602    If they were modified, a record was made in undobuf so that
4603    undo_all will (among other things) return X to its original state.
4604 
4605    If the number of changes necessary is too much to record to undo,
4606    the excess changes are not made, so the result is invalid.
4607    The changes already made can still be undone.
4608    undobuf.num_undo is incremented for such changes, so by testing that,
4609    the caller can tell whether the result is valid.
4610 
4611    `n_occurrences' is incremented each time FROM is replaced.
4612 
4613    IN_DEST is nonzero if we are processing the SET_DEST of a SET.
4614 
4615    UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
4616    by copying if `n_occurrences' is nonzero.  */
4617 
4618 static rtx
4619 subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
4620 {
4621   enum rtx_code code = GET_CODE (x);
4622   enum machine_mode op0_mode = VOIDmode;
4623   const char *fmt;
4624   int len, i;
4625   rtx new_rtx;
4626 
4627 /* Two expressions are equal if they are identical copies of a shared
4628    RTX or if they are both registers with the same register number
4629    and mode.  */
4630 
4631 #define COMBINE_RTX_EQUAL_P(X,Y)			\
4632   ((X) == (Y)						\
4633    || (REG_P (X) && REG_P (Y)	\
4634        && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
4635 
4636   if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
4637     {
4638       n_occurrences++;
4639       return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
4640     }
4641 
4642   /* If X and FROM are the same register but different modes, they
4643      will not have been seen as equal above.  However, the log links code
4644      will make a LOG_LINKS entry for that case.  If we do nothing, we
4645      will try to rerecognize our original insn and, when it succeeds,
4646      we will delete the feeding insn, which is incorrect.
4647 
4648      So force this insn not to match in this (rare) case.  */
4649   if (! in_dest && code == REG && REG_P (from)
4650       && reg_overlap_mentioned_p (x, from))
4651     return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
4652 
4653   /* If this is an object, we are done unless it is a MEM or LO_SUM, both
4654      of which may contain things that can be combined.  */
4655   if (code != MEM && code != LO_SUM && OBJECT_P (x))
4656     return x;
4657 
4658   /* It is possible to have a subexpression appear twice in the insn.
4659      Suppose that FROM is a register that appears within TO.
4660      Then, after that subexpression has been scanned once by `subst',
4661      the second time it is scanned, TO may be found.  If we were
4662      to scan TO here, we would find FROM within it and create a
4663      self-referent rtl structure which is completely wrong.  */
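  /* For instance, if FROM is (reg 66) and TO is (plus (reg 66)
     (const_int 4)), then once TO has been installed, a later scan
     would find the (reg 66) inside it and splice TO in there, making
     TO a subexpression of itself.  (The register number is purely
     illustrative.)  */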
4664   if (COMBINE_RTX_EQUAL_P (x, to))
4665     return to;
4666 
4667   /* Parallel asm_operands need special attention because all of the
4668      inputs are shared across the arms.  Furthermore, unsharing the
4669      rtl results in recognition failures.  Failure to handle this case
4670      specially can result in circular rtl.
4671 
4672      Solve this by doing a normal pass across the first entry of the
4673      parallel, and only processing the SET_DESTs of the subsequent
4674      entries.  Ug.  */
4675 
4676   if (code == PARALLEL
4677       && GET_CODE (XVECEXP (x, 0, 0)) == SET
4678       && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
4679     {
4680       new_rtx = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
4681 
4682       /* If this substitution failed, this whole thing fails.  */
4683       if (GET_CODE (new_rtx) == CLOBBER
4684 	  && XEXP (new_rtx, 0) == const0_rtx)
4685 	return new_rtx;
4686 
4687       SUBST (XVECEXP (x, 0, 0), new_rtx);
4688 
4689       for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
4690 	{
4691 	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
4692 
4693 	  if (!REG_P (dest)
4694 	      && GET_CODE (dest) != CC0
4695 	      && GET_CODE (dest) != PC)
4696 	    {
4697 	      new_rtx = subst (dest, from, to, 0, unique_copy);
4698 
4699 	      /* If this substitution failed, this whole thing fails.  */
4700 	      if (GET_CODE (new_rtx) == CLOBBER
4701 		  && XEXP (new_rtx, 0) == const0_rtx)
4702 		return new_rtx;
4703 
4704 	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
4705 	    }
4706 	}
4707     }
4708   else
4709     {
4710       len = GET_RTX_LENGTH (code);
4711       fmt = GET_RTX_FORMAT (code);
4712 
4713       /* We don't need to process a SET_DEST that is a register, CC0,
4714 	 or PC, so set up to skip this common case.  All other cases
4715 	 where we want to suppress replacing something inside a
4716 	 SET_SRC are handled via the IN_DEST operand.  */
4717       if (code == SET
4718 	  && (REG_P (SET_DEST (x))
4719 	      || GET_CODE (SET_DEST (x)) == CC0
4720 	      || GET_CODE (SET_DEST (x)) == PC))
4721 	fmt = "ie";
4722 
4723       /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
4724 	 constant.  */
4725       if (fmt[0] == 'e')
4726 	op0_mode = GET_MODE (XEXP (x, 0));
4727 
4728       for (i = 0; i < len; i++)
4729 	{
4730 	  if (fmt[i] == 'E')
4731 	    {
4732 	      int j;
4733 	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4734 		{
4735 		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
4736 		    {
4737 		      new_rtx = (unique_copy && n_occurrences
4738 			     ? copy_rtx (to) : to);
4739 		      n_occurrences++;
4740 		    }
4741 		  else
4742 		    {
4743 		      new_rtx = subst (XVECEXP (x, i, j), from, to, 0,
4744 				   unique_copy);
4745 
4746 		      /* If this substitution failed, this whole thing
4747 			 fails.  */
4748 		      if (GET_CODE (new_rtx) == CLOBBER
4749 			  && XEXP (new_rtx, 0) == const0_rtx)
4750 			return new_rtx;
4751 		    }
4752 
4753 		  SUBST (XVECEXP (x, i, j), new_rtx);
4754 		}
4755 	    }
4756 	  else if (fmt[i] == 'e')
4757 	    {
4758 	      /* If this is a register being set, ignore it.  */
4759 	      new_rtx = XEXP (x, i);
4760 	      if (in_dest
4761 		  && i == 0
4762 		  && (((code == SUBREG || code == ZERO_EXTRACT)
4763 		       && REG_P (new_rtx))
4764 		      || code == STRICT_LOW_PART))
4765 		;
4766 
4767 	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
4768 		{
4769 		  /* In general, don't install a subreg involving two
4770 		     modes not tieable.  It can worsen register
4771 		     allocation, and can even make invalid reload
4772 		     insns, since the reg inside may need to be copied
4773 		     from in the outside mode, and that may be invalid
4774 		     if it is an fp reg copied in integer mode.
4775 
4776 		     We allow two exceptions to this: It is valid if
4777 		     it is inside another SUBREG and the mode of that
4778 		     SUBREG and the mode of the inside of TO is
4779 		     tieable and it is valid if X is a SET that copies
4780 		     FROM to CC0.  */
4781 
4782 		  if (GET_CODE (to) == SUBREG
4783 		      && ! MODES_TIEABLE_P (GET_MODE (to),
4784 					    GET_MODE (SUBREG_REG (to)))
4785 		      && ! (code == SUBREG
4786 			    && MODES_TIEABLE_P (GET_MODE (x),
4787 						GET_MODE (SUBREG_REG (to))))
4788 #ifdef HAVE_cc0
4789 		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
4790 #endif
4791 		      )
4792 		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4793 
4794 #ifdef CANNOT_CHANGE_MODE_CLASS
4795 		  if (code == SUBREG
4796 		      && REG_P (to)
4797 		      && REGNO (to) < FIRST_PSEUDO_REGISTER
4798 		      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
4799 						   GET_MODE (to),
4800 						   GET_MODE (x)))
4801 		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4802 #endif
4803 
4804 		  new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
4805 		  n_occurrences++;
4806 		}
4807 	      else
4808 		/* If we are in a SET_DEST, suppress most cases unless we
4809 		   have gone inside a MEM, in which case we want to
4810 		   simplify the address.  We assume here that things that
4811 		   are actually part of the destination have their inner
4812 		   parts in the first expression.  This is true for SUBREG,
4813 		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
4814 		   things aside from REG and MEM that should appear in a
4815 		   SET_DEST.  */
4816 		new_rtx = subst (XEXP (x, i), from, to,
4817 			     (((in_dest
4818 				&& (code == SUBREG || code == STRICT_LOW_PART
4819 				    || code == ZERO_EXTRACT))
4820 			       || code == SET)
4821 			      && i == 0), unique_copy);
4822 
4823 	      /* If we found that we will have to reject this combination,
4824 		 indicate that by returning the CLOBBER ourselves, rather than
4825 		 an expression containing it.  This will speed things up as
4826 		 well as prevent accidents where two CLOBBERs are considered
4827 		 to be equal, thus producing an incorrect simplification.  */
4828 
4829 	      if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
4830 		return new_rtx;
4831 
4832 	      if (GET_CODE (x) == SUBREG
4833 		  && (CONST_INT_P (new_rtx)
4834 		      || GET_CODE (new_rtx) == CONST_DOUBLE))
4835 		{
4836 		  enum machine_mode mode = GET_MODE (x);
4837 
4838 		  x = simplify_subreg (GET_MODE (x), new_rtx,
4839 				       GET_MODE (SUBREG_REG (x)),
4840 				       SUBREG_BYTE (x));
4841 		  if (! x)
4842 		    x = gen_rtx_CLOBBER (mode, const0_rtx);
4843 		}
4844 	      else if (CONST_INT_P (new_rtx)
4845 		       && GET_CODE (x) == ZERO_EXTEND)
4846 		{
4847 		  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
4848 						new_rtx, GET_MODE (XEXP (x, 0)));
4849 		  gcc_assert (x);
4850 		}
4851 	      else
4852 		SUBST (XEXP (x, i), new_rtx);
4853 	    }
4854 	}
4855     }
4856 
4857   /* Check if we are loading something from the constant pool via float
4858      extension; in this case we would undo the compress_float_constant
4859      optimization and degrade the constant load to an immediate value.  */
4860   if (GET_CODE (x) == FLOAT_EXTEND
4861       && MEM_P (XEXP (x, 0))
4862       && MEM_READONLY_P (XEXP (x, 0)))
4863     {
4864       rtx tmp = avoid_constant_pool_reference (x);
4865       if (x != tmp)
4866         return x;
4867     }
4868 
4869   /* Try to simplify X.  If the simplification changed the code, it is likely
4870      that further simplification will help, so loop, but limit the number
4871      of repetitions that will be performed.  */
4872 
4873   for (i = 0; i < 4; i++)
4874     {
4875       /* If X is sufficiently simple, don't bother trying to do anything
4876 	 with it.  */
4877       if (code != CONST_INT && code != REG && code != CLOBBER)
4878 	x = combine_simplify_rtx (x, op0_mode, in_dest);
4879 
4880       if (GET_CODE (x) == code)
4881 	break;
4882 
4883       code = GET_CODE (x);
4884 
4885       /* We no longer know the original mode of operand 0 since we
4886 	 have changed the form of X)  */
4887 	 have changed the form of X.  */
4888     }
4889 
4890   return x;
4891 }
4892 
4893 /* Simplify X, a piece of RTL.  We just operate on the expression at the
4894    outer level; call `subst' to simplify recursively.  Return the new
4895    expression.
4896 
4897    OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
4898    if we are inside a SET_DEST.  */
4899 
4900 static rtx
4901 combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
4902 {
4903   enum rtx_code code = GET_CODE (x);
4904   enum machine_mode mode = GET_MODE (x);
4905   rtx temp;
4906   int i;
4907 
4908   /* If this is a commutative operation, put a constant last and a complex
4909      expression first.  We don't need to do this for comparisons here.  */
4910   if (COMMUTATIVE_ARITH_P (x)
4911       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
4912     {
4913       temp = XEXP (x, 0);
4914       SUBST (XEXP (x, 0), XEXP (x, 1));
4915       SUBST (XEXP (x, 1), temp);
4916     }
4917 
4918   /* If this is a simple operation applied to an IF_THEN_ELSE, try
4919      applying it to the arms of the IF_THEN_ELSE.  This often simplifies
4920      things.  Check for cases where both arms are testing the same
4921      condition.
4922 
4923      Don't do anything if all operands are very simple.  */
4924 
4925   if ((BINARY_P (x)
4926        && ((!OBJECT_P (XEXP (x, 0))
4927 	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4928 		  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
4929 	   || (!OBJECT_P (XEXP (x, 1))
4930 	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
4931 		     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
4932       || (UNARY_P (x)
4933 	  && (!OBJECT_P (XEXP (x, 0))
4934 	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4935 		     && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
4936     {
4937       rtx cond, true_rtx, false_rtx;
4938 
4939       cond = if_then_else_cond (x, &true_rtx, &false_rtx);
4940       if (cond != 0
4941 	  /* If everything is a comparison, what we have is highly unlikely
4942 	     to be simpler, so don't use it.  */
4943 	  && ! (COMPARISON_P (x)
4944 		&& (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
4945 	{
4946 	  rtx cop1 = const0_rtx;
4947 	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
4948 
4949 	  if (cond_code == NE && COMPARISON_P (cond))
4950 	    return x;
4951 
4952 	  /* Simplify the alternative arms; this may collapse the true and
4953 	     false arms to store-flag values.  Be careful to use copy_rtx
4954 	     here since true_rtx or false_rtx might share RTL with x as a
4955 	     result of the if_then_else_cond call above.  */
4956 	  true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
4957 	  false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
4958 
4959 	  /* If true_rtx and false_rtx are not general_operands, an if_then_else
4960 	     is unlikely to be simpler.  */
4961 	  if (general_operand (true_rtx, VOIDmode)
4962 	      && general_operand (false_rtx, VOIDmode))
4963 	    {
4964 	      enum rtx_code reversed;
4965 
4966 	      /* Restarting if we generate a store-flag expression will cause
4967 		 us to loop.  Just drop through in this case.  */
4968 
4969 	      /* If the result values are STORE_FLAG_VALUE and zero, we can
4970 		 just make the comparison operation.  */
4971 	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
4972 		x = simplify_gen_relational (cond_code, mode, VOIDmode,
4973 					     cond, cop1);
4974 	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
4975 		       && ((reversed = reversed_comparison_code_parts
4976 					(cond_code, cond, cop1, NULL))
4977 			   != UNKNOWN))
4978 		x = simplify_gen_relational (reversed, mode, VOIDmode,
4979 					     cond, cop1);
4980 
4981 	      /* Likewise, we can make the negate of a comparison operation
4982 		 if the result values are - STORE_FLAG_VALUE and zero.  */
4983 	      else if (CONST_INT_P (true_rtx)
4984 		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
4985 		       && false_rtx == const0_rtx)
4986 		x = simplify_gen_unary (NEG, mode,
4987 					simplify_gen_relational (cond_code,
4988 								 mode, VOIDmode,
4989 								 cond, cop1),
4990 					mode);
4991 	      else if (CONST_INT_P (false_rtx)
4992 		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
4993 		       && true_rtx == const0_rtx
4994 		       && ((reversed = reversed_comparison_code_parts
4995 					(cond_code, cond, cop1, NULL))
4996 			   != UNKNOWN))
4997 		x = simplify_gen_unary (NEG, mode,
4998 					simplify_gen_relational (reversed,
4999 								 mode, VOIDmode,
5000 								 cond, cop1),
5001 					mode);
5002 	      else
5003 		return gen_rtx_IF_THEN_ELSE (mode,
5004 					     simplify_gen_relational (cond_code,
5005 								      mode,
5006 								      VOIDmode,
5007 								      cond,
5008 								      cop1),
5009 					     true_rtx, false_rtx);
5010 
5011 	      code = GET_CODE (x);
5012 	      op0_mode = VOIDmode;
5013 	    }
5014 	}
5015     }
5016 
5017   /* Try to fold this expression in case we have constants that weren't
5018      present before.  */
5019   temp = 0;
5020   switch (GET_RTX_CLASS (code))
5021     {
5022     case RTX_UNARY:
5023       if (op0_mode == VOIDmode)
5024 	op0_mode = GET_MODE (XEXP (x, 0));
5025       temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
5026       break;
5027     case RTX_COMPARE:
5028     case RTX_COMM_COMPARE:
5029       {
5030 	enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
5031 	if (cmp_mode == VOIDmode)
5032 	  {
5033 	    cmp_mode = GET_MODE (XEXP (x, 1));
5034 	    if (cmp_mode == VOIDmode)
5035 	      cmp_mode = op0_mode;
5036 	  }
5037 	temp = simplify_relational_operation (code, mode, cmp_mode,
5038 					      XEXP (x, 0), XEXP (x, 1));
5039       }
5040       break;
5041     case RTX_COMM_ARITH:
5042     case RTX_BIN_ARITH:
5043       temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
5044       break;
5045     case RTX_BITFIELD_OPS:
5046     case RTX_TERNARY:
5047       temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
5048 					 XEXP (x, 1), XEXP (x, 2));
5049       break;
5050     default:
5051       break;
5052     }
5053 
5054   if (temp)
5055     {
5056       x = temp;
5057       code = GET_CODE (temp);
5058       op0_mode = VOIDmode;
5059       mode = GET_MODE (temp);
5060     }
5061 
5062   /* First see if we can apply the inverse distributive law.  */
5063   if (code == PLUS || code == MINUS
5064       || code == AND || code == IOR || code == XOR)
5065     {
5066       x = apply_distributive_law (x);
5067       code = GET_CODE (x);
5068       op0_mode = VOIDmode;
5069     }
5070 
5071   /* If CODE is an associative operation not otherwise handled, see if we
5072      can associate some operands.  This can win if they are constants or
5073      if they are logically related (i.e. (a & b) & a).  */
5074   if ((code == PLUS || code == MINUS || code == MULT || code == DIV
5075        || code == AND || code == IOR || code == XOR
5076        || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
5077       && ((INTEGRAL_MODE_P (mode) && code != DIV)
5078 	  || (flag_associative_math && FLOAT_MODE_P (mode))))
5079     {
5080       if (GET_CODE (XEXP (x, 0)) == code)
5081 	{
5082 	  rtx other = XEXP (XEXP (x, 0), 0);
5083 	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
5084 	  rtx inner_op1 = XEXP (x, 1);
5085 	  rtx inner;
5086 
5087 	  /* Make sure we pass the constant operand, if any, as the
5088 	     second one if this is a commutative operation.  */
5089 	  if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
5090 	    {
5091 	      rtx tem = inner_op0;
5092 	      inner_op0 = inner_op1;
5093 	      inner_op1 = tem;
5094 	    }
5095 	  inner = simplify_binary_operation (code == MINUS ? PLUS
5096 					     : code == DIV ? MULT
5097 					     : code,
5098 					     mode, inner_op0, inner_op1);
5099 
5100 	  /* For commutative operations, try the other pair if that one
5101 	     didn't simplify.  */
5102 	  if (inner == 0 && COMMUTATIVE_ARITH_P (x))
5103 	    {
5104 	      other = XEXP (XEXP (x, 0), 1);
5105 	      inner = simplify_binary_operation (code, mode,
5106 						 XEXP (XEXP (x, 0), 0),
5107 						 XEXP (x, 1));
5108 	    }
5109 
5110 	  if (inner)
5111 	    return simplify_gen_binary (code, mode, other, inner);
5112 	}
5113     }
5114 
5115   /* A little bit of algebraic simplification here.  */
5116   switch (code)
5117     {
5118     case MEM:
5119       /* Ensure that our address has any ASHIFTs converted to MULT in case
5120 	 address-recognizing predicates are called later.  */
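      /* For instance, an address such as
	   (plus (ashift (reg) (const_int 2)) (reg))
	 is rewritten here as
	   (plus (mult (reg) (const_int 4)) (reg)),
	 the canonical form for addresses.  */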
5121       temp = make_compound_operation (XEXP (x, 0), MEM);
5122       SUBST (XEXP (x, 0), temp);
5123       break;
5124 
5125     case SUBREG:
5126       if (op0_mode == VOIDmode)
5127 	op0_mode = GET_MODE (SUBREG_REG (x));
5128 
5129       /* See if this can be moved to simplify_subreg.  */
5130       if (CONSTANT_P (SUBREG_REG (x))
5131 	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
5132 	     /* Don't call gen_lowpart if the inner mode
5133 		is VOIDmode and we cannot simplify it, as SUBREG without
5134 		inner mode is invalid.  */
5135 	  && (GET_MODE (SUBREG_REG (x)) != VOIDmode
5136 	      || gen_lowpart_common (mode, SUBREG_REG (x))))
5137 	return gen_lowpart (mode, SUBREG_REG (x));
5138 
5139       if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
5140 	break;
5141       {
5142 	rtx temp;
5143 	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
5144 				SUBREG_BYTE (x));
5145 	if (temp)
5146 	  return temp;
5147       }
5148 
5149       /* Don't change the mode of the MEM if that would change the meaning
5150 	 of the address.  */
5151       if (MEM_P (SUBREG_REG (x))
5152 	  && (MEM_VOLATILE_P (SUBREG_REG (x))
5153 	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
5154 	return gen_rtx_CLOBBER (mode, const0_rtx);
5155 
5156       /* Note that we cannot do any narrowing for non-constants since
5157 	 we might have been counting on using the fact that some bits were
5158 	 zero.  We now do this in the SET.  */
5159 
5160       break;
5161 
5162     case NEG:
5163       temp = expand_compound_operation (XEXP (x, 0));
5164 
5165       /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
5166 	 replaced by (lshiftrt X C).  This will convert
5167 	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
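      /* For instance, in 32-bit SImode with C == 31, (ashiftrt X 31)
	 is 0 or -1 according to the sign bit of X, so its negation is
	 0 or 1, which is exactly (lshiftrt X 31).  */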
5168 
5169       if (GET_CODE (temp) == ASHIFTRT
5170 	  && CONST_INT_P (XEXP (temp, 1))
5171 	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
5172 	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
5173 				     INTVAL (XEXP (temp, 1)));
5174 
5175       /* If X has only a single bit that might be nonzero, say, bit I, convert
5176 	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
5177 	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
5178 	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
5179 	 or a SUBREG of one since we'd be making the expression more
5180 	 complex if it was just a register.  */
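      /* For instance, in 32-bit SImode with I == 3, TEMP is 0 or 8, so
	 (neg TEMP) is 0 or -8; (ashift TEMP 28) moves bit 3 into the
	 sign bit and (ashiftrt ... 28) sign-extends it back, yielding
	 the same 0 or -8.  */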
5181 
5182       if (!REG_P (temp)
5183 	  && ! (GET_CODE (temp) == SUBREG
5184 		&& REG_P (SUBREG_REG (temp)))
5185 	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
5186 	{
5187 	  rtx temp1 = simplify_shift_const
5188 	    (NULL_RTX, ASHIFTRT, mode,
5189 	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
5190 				   GET_MODE_BITSIZE (mode) - 1 - i),
5191 	     GET_MODE_BITSIZE (mode) - 1 - i);
5192 
5193 	  /* If all we did was surround TEMP with the two shifts, we
5194 	     haven't improved anything, so don't use it.  Otherwise,
5195 	     we are better off with TEMP1.  */
5196 	  if (GET_CODE (temp1) != ASHIFTRT
5197 	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
5198 	      || XEXP (XEXP (temp1, 0), 0) != temp)
5199 	    return temp1;
5200 	}
5201       break;
5202 
5203     case TRUNCATE:
5204       /* We can't handle truncation to a partial integer mode here
5205 	 because we don't know the real bitsize of the partial
5206 	 integer mode.  */
5207       if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
5208 	break;
5209 
5210       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5211 	SUBST (XEXP (x, 0),
5212 	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
5213 			      GET_MODE_MASK (mode), 0));
5214 
5215       /* We can truncate a constant value and return it.  */
5216       if (CONST_INT_P (XEXP (x, 0)))
5217 	return gen_int_mode (INTVAL (XEXP (x, 0)), mode);
5218 
5219       /* Similarly to what we do in simplify-rtx.c, a truncate of a register
5220 	 whose value is a comparison can be replaced with a subreg if
5221 	 STORE_FLAG_VALUE permits.  */
5222       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5223 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
5224 	  && (temp = get_last_value (XEXP (x, 0)))
5225 	  && COMPARISON_P (temp))
5226 	return gen_lowpart (mode, XEXP (x, 0));
5227       break;
5228 
5229     case CONST:
5230       /* (const (const X)) can become (const X).  Do it this way rather than
5231 	 returning the inner CONST since CONST can be shared with a
5232 	 REG_EQUAL note.  */
5233       if (GET_CODE (XEXP (x, 0)) == CONST)
5234 	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
5235       break;
5236 
5237 #ifdef HAVE_lo_sum
5238     case LO_SUM:
5239       /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
5240 	 can add in an offset.  find_split_point will split this address up
5241 	 again if it doesn't match.  */
5242       if (GET_CODE (XEXP (x, 0)) == HIGH
5243 	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
5244 	return XEXP (x, 1);
5245       break;
5246 #endif
5247 
5248     case PLUS:
5249       /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
5250 	 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
5251 	 bit-field and can be replaced by either a sign_extend or a
5252 	 sign_extract.  The `and' may be a zero_extend and the two
5253 	 <c>, -<c> constants may be reversed.  */
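      /* For instance, for a 3-bit field: pow2 == 8, the AND mask is 7
	 and <c> is 4, and ((x & 7) ^ 4) - 4 maps 0..7 to -4..3, the
	 sign extension of the low three bits of x.  */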
5254       if (GET_CODE (XEXP (x, 0)) == XOR
5255 	  && CONST_INT_P (XEXP (x, 1))
5256 	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
5257 	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
5258 	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5259 	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5260 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5261 	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
5262 	       && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
5263 	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5264 		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
5265 	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
5266 		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
5267 		      == (unsigned int) i + 1))))
5268 	return simplify_shift_const
5269 	  (NULL_RTX, ASHIFTRT, mode,
5270 	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
5271 				 XEXP (XEXP (XEXP (x, 0), 0), 0),
5272 				 GET_MODE_BITSIZE (mode) - (i + 1)),
5273 	   GET_MODE_BITSIZE (mode) - (i + 1));
5274 
5275       /* If only the low-order bit of X is possibly nonzero, (plus x -1)
5276 	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
5277 	 the bitsize of the mode - 1.  This allows simplification of
5278 	 "a = (b & 8) == 0;"  */
5279       if (XEXP (x, 1) == constm1_rtx
5280 	  && !REG_P (XEXP (x, 0))
5281 	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5282 		&& REG_P (SUBREG_REG (XEXP (x, 0))))
5283 	  && nonzero_bits (XEXP (x, 0), mode) == 1)
5284 	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
5285 	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
5286 				 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
5287 				 GET_MODE_BITSIZE (mode) - 1),
5288 	   GET_MODE_BITSIZE (mode) - 1);
5289 
5290       /* If we are adding two things that have no bits in common, convert
5291 	 the addition into an IOR.  This will often be further simplified,
5292 	 for example in cases like ((a & 1) + (a & 2)), which can
5293 	 become a & 3.  */
5294 
5295       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5296 	  && (nonzero_bits (XEXP (x, 0), mode)
5297 	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
5298 	{
5299 	  /* Try to simplify the expression further.  */
5300 	  rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
5301 	  temp = combine_simplify_rtx (tor, VOIDmode, in_dest);
5302 
5303 	  /* If we could, great.  If not, do not go ahead with the IOR
5304 	     replacement, since PLUS appears in many special purpose
5305 	     address arithmetic instructions.  */
5306 	  if (GET_CODE (temp) != CLOBBER && temp != tor)
5307 	    return temp;
5308 	}
5309       break;
5310 
5311     case MINUS:
5312       /* (minus <foo> (and <foo> (const_int -pow2))) becomes
5313 	 (and <foo> (const_int pow2-1))  */
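      /* For instance, (minus <foo> (and <foo> (const_int -8))) subtracts
	 <foo> rounded down to a multiple of 8, leaving just the low
	 three bits, i.e. (and <foo> (const_int 7)).  */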
5314       if (GET_CODE (XEXP (x, 1)) == AND
5315 	  && CONST_INT_P (XEXP (XEXP (x, 1), 1))
5316 	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
5317 	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
5318 	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
5319 				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
5320       break;
5321 
5322     case MULT:
5323       /* If we have (mult (plus A B) C), apply the distributive law and then
5324 	 the inverse distributive law to see if things simplify.  This
5325 	 occurs mostly in addresses, often when unrolling loops.  */
5326 
5327       if (GET_CODE (XEXP (x, 0)) == PLUS)
5328 	{
5329 	  rtx result = distribute_and_simplify_rtx (x, 0);
5330 	  if (result)
5331 	    return result;
5332 	}
5333 
5334       /* Try to simplify a*(b/c) as (a*b)/c.  */
5335       if (FLOAT_MODE_P (mode) && flag_associative_math
5336 	  && GET_CODE (XEXP (x, 0)) == DIV)
5337 	{
5338 	  rtx tem = simplify_binary_operation (MULT, mode,
5339 					       XEXP (XEXP (x, 0), 0),
5340 					       XEXP (x, 1));
5341 	  if (tem)
5342 	    return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
5343 	}
5344       break;
5345 
5346     case UDIV:
5347       /* If this is a divide by a power of two, treat it as a shift if
5348 	 its first operand is a shift.  */
5349       if (CONST_INT_P (XEXP (x, 1))
5350 	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
5351 	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
5352 	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
5353 	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
5354 	      || GET_CODE (XEXP (x, 0)) == ROTATE
5355 	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
5356 	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
5357       break;
5358 
5359     case EQ:  case NE:
5360     case GT:  case GTU:  case GE:  case GEU:
5361     case LT:  case LTU:  case LE:  case LEU:
5362     case UNEQ:  case LTGT:
5363     case UNGT:  case UNGE:
5364     case UNLT:  case UNLE:
5365     case UNORDERED: case ORDERED:
5366       /* If the first operand is a condition code, we can't do anything
5367 	 with it.  */
5368       if (GET_CODE (XEXP (x, 0)) == COMPARE
5369 	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
5370 	      && ! CC0_P (XEXP (x, 0))))
5371 	{
5372 	  rtx op0 = XEXP (x, 0);
5373 	  rtx op1 = XEXP (x, 1);
5374 	  enum rtx_code new_code;
5375 
5376 	  if (GET_CODE (op0) == COMPARE)
5377 	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5378 
5379 	  /* Simplify our comparison, if possible.  */
5380 	  new_code = simplify_comparison (code, &op0, &op1);
5381 
5382 	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
5383 	     if only the low-order bit is possibly nonzero in X (such as when
5384 	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
5385 	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
5386 	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
5387 	     (plus X 1).
5388 
5389 	     Remove any ZERO_EXTRACT we made when thinking this was a
5390 	     comparison.  It may now be simpler to use, e.g., an AND.  If a
5391 	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
5392 	     the call to make_compound_operation in the SET case.  */
5393 
5394 	  if (STORE_FLAG_VALUE == 1
5395 	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5396 	      && op1 == const0_rtx
5397 	      && mode == GET_MODE (op0)
5398 	      && nonzero_bits (op0, mode) == 1)
5399 	    return gen_lowpart (mode,
5400 				expand_compound_operation (op0));
5401 
5402 	  else if (STORE_FLAG_VALUE == 1
5403 		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5404 		   && op1 == const0_rtx
5405 		   && mode == GET_MODE (op0)
5406 		   && (num_sign_bit_copies (op0, mode)
5407 		       == GET_MODE_BITSIZE (mode)))
5408 	    {
5409 	      op0 = expand_compound_operation (op0);
5410 	      return simplify_gen_unary (NEG, mode,
5411 					 gen_lowpart (mode, op0),
5412 					 mode);
5413 	    }
5414 
5415 	  else if (STORE_FLAG_VALUE == 1
5416 		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5417 		   && op1 == const0_rtx
5418 		   && mode == GET_MODE (op0)
5419 		   && nonzero_bits (op0, mode) == 1)
5420 	    {
5421 	      op0 = expand_compound_operation (op0);
5422 	      return simplify_gen_binary (XOR, mode,
5423 					  gen_lowpart (mode, op0),
5424 					  const1_rtx);
5425 	    }
5426 
5427 	  else if (STORE_FLAG_VALUE == 1
5428 		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5429 		   && op1 == const0_rtx
5430 		   && mode == GET_MODE (op0)
5431 		   && (num_sign_bit_copies (op0, mode)
5432 		       == GET_MODE_BITSIZE (mode)))
5433 	    {
5434 	      op0 = expand_compound_operation (op0);
5435 	      return plus_constant (gen_lowpart (mode, op0), 1);
5436 	    }
5437 
5438 	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
5439 	     those above.  */
5440 	  if (STORE_FLAG_VALUE == -1
5441 	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5442 	      && op1 == const0_rtx
5443 	      && (num_sign_bit_copies (op0, mode)
5444 		  == GET_MODE_BITSIZE (mode)))
5445 	    return gen_lowpart (mode,
5446 				expand_compound_operation (op0));
5447 
5448 	  else if (STORE_FLAG_VALUE == -1
5449 		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5450 		   && op1 == const0_rtx
5451 		   && mode == GET_MODE (op0)
5452 		   && nonzero_bits (op0, mode) == 1)
5453 	    {
5454 	      op0 = expand_compound_operation (op0);
5455 	      return simplify_gen_unary (NEG, mode,
5456 					 gen_lowpart (mode, op0),
5457 					 mode);
5458 	    }
5459 
5460 	  else if (STORE_FLAG_VALUE == -1
5461 		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5462 		   && op1 == const0_rtx
5463 		   && mode == GET_MODE (op0)
5464 		   && (num_sign_bit_copies (op0, mode)
5465 		       == GET_MODE_BITSIZE (mode)))
5466 	    {
5467 	      op0 = expand_compound_operation (op0);
5468 	      return simplify_gen_unary (NOT, mode,
5469 					 gen_lowpart (mode, op0),
5470 					 mode);
5471 	    }
5472 
5473 	  /* If X is 0/1, (eq X 0) is X-1.  */
5474 	  else if (STORE_FLAG_VALUE == -1
5475 		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5476 		   && op1 == const0_rtx
5477 		   && mode == GET_MODE (op0)
5478 		   && nonzero_bits (op0, mode) == 1)
5479 	    {
5480 	      op0 = expand_compound_operation (op0);
5481 	      return plus_constant (gen_lowpart (mode, op0), -1);
5482 	    }
5483 
5484 	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
5485 	     one bit that might be nonzero, we can convert (ne x 0) to
5486 	     (ashift x c) where C puts the bit in the sign bit.  Remove any
5487 	     AND with STORE_FLAG_VALUE when we are done, since we are only
5488 	     going to test the sign bit.  */
5489 	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5490 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5491 	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5492 		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5493 	      && op1 == const0_rtx
5494 	      && mode == GET_MODE (op0)
5495 	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
5496 	    {
5497 	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
5498 					expand_compound_operation (op0),
5499 					GET_MODE_BITSIZE (mode) - 1 - i);
5500 	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
5501 		return XEXP (x, 0);
5502 	      else
5503 		return x;
5504 	    }
5505 
5506 	  /* If the code changed, return a whole new comparison.  */
5507 	  if (new_code != code)
5508 	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);
5509 
5510 	  /* Otherwise, keep this operation, but maybe change its operands.
5511 	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
5512 	  SUBST (XEXP (x, 0), op0);
5513 	  SUBST (XEXP (x, 1), op1);
5514 	}
5515       break;
5516 
5517     case IF_THEN_ELSE:
5518       return simplify_if_then_else (x);
5519 
5520     case ZERO_EXTRACT:
5521     case SIGN_EXTRACT:
5522     case ZERO_EXTEND:
5523     case SIGN_EXTEND:
5524       /* If we are processing SET_DEST, we are done.  */
5525       if (in_dest)
5526 	return x;
5527 
5528       return expand_compound_operation (x);
5529 
5530     case SET:
5531       return simplify_set (x);
5532 
5533     case AND:
5534     case IOR:
5535       return simplify_logical (x);
5536 
5537     case ASHIFT:
5538     case LSHIFTRT:
5539     case ASHIFTRT:
5540     case ROTATE:
5541     case ROTATERT:
5542       /* If this is a shift by a constant amount, simplify it.  */
5543       if (CONST_INT_P (XEXP (x, 1)))
5544 	return simplify_shift_const (x, code, mode, XEXP (x, 0),
5545 				     INTVAL (XEXP (x, 1)));
5546 
5547       else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
5548 	SUBST (XEXP (x, 1),
5549 	       force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
5550 			      ((HOST_WIDE_INT) 1
5551 			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
5552 			      - 1,
5553 			      0));
5554       break;
5555 
5556     default:
5557       break;
5558     }
5559 
5560   return x;
5561 }
5562 
5563 /* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
5564 
5565 static rtx
5566 simplify_if_then_else (rtx x)
5567 {
5568   enum machine_mode mode = GET_MODE (x);
5569   rtx cond = XEXP (x, 0);
5570   rtx true_rtx = XEXP (x, 1);
5571   rtx false_rtx = XEXP (x, 2);
5572   enum rtx_code true_code = GET_CODE (cond);
5573   int comparison_p = COMPARISON_P (cond);
5574   rtx temp;
5575   int i;
5576   enum rtx_code false_code;
5577   rtx reversed;
5578 
5579   /* Simplify storing of the truth value.  */
5580   if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
5581     return simplify_gen_relational (true_code, mode, VOIDmode,
5582 				    XEXP (cond, 0), XEXP (cond, 1));
5583 
5584   /* Also when the truth value has to be reversed.  */
5585   if (comparison_p
5586       && true_rtx == const0_rtx && false_rtx == const_true_rtx
5587       && (reversed = reversed_comparison (cond, mode)))
5588     return reversed;
5589 
5590   /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
5591      in it is being compared against certain values.  Get the true and false
5592      comparisons and see if that says anything about the value of each arm.  */
5593 
5594   if (comparison_p
5595       && ((false_code = reversed_comparison_code (cond, NULL))
5596 	  != UNKNOWN)
5597       && REG_P (XEXP (cond, 0)))
5598     {
5599       HOST_WIDE_INT nzb;
5600       rtx from = XEXP (cond, 0);
5601       rtx true_val = XEXP (cond, 1);
5602       rtx false_val = true_val;
5603       int swapped = 0;
5604 
5605       /* If FALSE_CODE is EQ, swap the codes and arms.  */
5606 
5607       if (false_code == EQ)
5608 	{
5609 	  swapped = 1, true_code = EQ, false_code = NE;
5610 	  temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
5611 	}
5612 
5613       /* If we are comparing against zero and the expression being tested has
5614 	 only a single bit that might be nonzero, that is its value when it is
5615 	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
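      /* For instance, if nonzero_bits says FROM can only be 0 or 8,
	 then whenever (eq FROM 0) is false FROM must equal 8, so the
	 false arm may be simplified under the assumption FROM == 8.  */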
5616 
5617       if (true_code == EQ && true_val == const0_rtx
5618 	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
5619 	{
5620 	  false_code = EQ;
5621 	  false_val = GEN_INT (trunc_int_for_mode (nzb, GET_MODE (from)));
5622 	}
5623       else if (true_code == EQ && true_val == const0_rtx
5624 	       && (num_sign_bit_copies (from, GET_MODE (from))
5625 		   == GET_MODE_BITSIZE (GET_MODE (from))))
5626 	{
5627 	  false_code = EQ;
5628 	  false_val = constm1_rtx;
5629 	}
5630 
5631       /* Now simplify an arm if we know the value of the register in the
5632 	 branch and it is used in the arm.  Be careful due to the potential
5633 	 of locally-shared RTL.  */
5634 
5635       if (reg_mentioned_p (from, true_rtx))
5636 	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
5637 				      from, true_val),
5638 		      pc_rtx, pc_rtx, 0, 0);
5639       if (reg_mentioned_p (from, false_rtx))
5640 	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
5641 				   from, false_val),
5642 		       pc_rtx, pc_rtx, 0, 0);
5643 
5644       SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
5645       SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
5646 
5647       true_rtx = XEXP (x, 1);
5648       false_rtx = XEXP (x, 2);
5649       true_code = GET_CODE (cond);
5650     }
5651 
5652   /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
5653      reversed, do so to avoid needing two sets of patterns for
5654      subtract-and-branch insns.  Similarly if we have a constant in the true
5655      arm, the false arm is the same as the first operand of the comparison, or
5656      the false arm is more complicated than the true arm.  */
5657 
5658   if (comparison_p
5659       && reversed_comparison_code (cond, NULL) != UNKNOWN
5660       && (true_rtx == pc_rtx
5661 	  || (CONSTANT_P (true_rtx)
5662 	      && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
5663 	  || true_rtx == const0_rtx
5664 	  || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
5665 	  || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
5666 	      && !OBJECT_P (false_rtx))
5667 	  || reg_mentioned_p (true_rtx, false_rtx)
5668 	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
5669     {
5670       true_code = reversed_comparison_code (cond, NULL);
5671       SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
5672       SUBST (XEXP (x, 1), false_rtx);
5673       SUBST (XEXP (x, 2), true_rtx);
5674 
5675       temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
5676       cond = XEXP (x, 0);
5677 
5678       /* It is possible that the conditional has been simplified out.  */
5679       true_code = GET_CODE (cond);
5680       comparison_p = COMPARISON_P (cond);
5681     }
5682 
5683   /* If the two arms are identical, we don't need the comparison.  */
5684 
5685   if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
5686     return true_rtx;
5687 
5688   /* Convert a == b ? b : a to "a".  */
5689   if (true_code == EQ && ! side_effects_p (cond)
5690       && !HONOR_NANS (mode)
5691       && rtx_equal_p (XEXP (cond, 0), false_rtx)
5692       && rtx_equal_p (XEXP (cond, 1), true_rtx))
5693     return false_rtx;
5694   else if (true_code == NE && ! side_effects_p (cond)
5695 	   && !HONOR_NANS (mode)
5696 	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
5697 	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
5698     return true_rtx;
5699 
5700   /* Look for cases where we have (abs x) or (neg (abs X)).  */
5701 
5702   if (GET_MODE_CLASS (mode) == MODE_INT
5703       && comparison_p
5704       && XEXP (cond, 1) == const0_rtx
5705       && GET_CODE (false_rtx) == NEG
5706       && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
5707       && rtx_equal_p (true_rtx, XEXP (cond, 0))
5708       && ! side_effects_p (true_rtx))
5709     switch (true_code)
5710       {
5711       case GT:
5712       case GE:
5713 	return simplify_gen_unary (ABS, mode, true_rtx, mode);
5714       case LT:
5715       case LE:
5716 	return
5717 	  simplify_gen_unary (NEG, mode,
5718 			      simplify_gen_unary (ABS, mode, true_rtx, mode),
5719 			      mode);
5720       default:
5721 	break;
5722       }
5723 
5724   /* Look for MIN or MAX.  */
5725 
5726   if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
5727       && comparison_p
5728       && rtx_equal_p (XEXP (cond, 0), true_rtx)
5729       && rtx_equal_p (XEXP (cond, 1), false_rtx)
5730       && ! side_effects_p (cond))
5731     switch (true_code)
5732       {
5733       case GE:
5734       case GT:
5735 	return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
5736       case LE:
5737       case LT:
5738 	return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
5739       case GEU:
5740       case GTU:
5741 	return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
5742       case LEU:
5743       case LTU:
5744 	return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
5745       default:
5746 	break;
5747       }
5748 
5749   /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
5750      second operand is zero, this can be done as (OP Z (mult COND C2)) where
5751      C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
5752      SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
5753      We can do this kind of thing in some cases when STORE_FLAG_VALUE is
5754      neither 1 nor -1, but it isn't worth checking for.  */
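  /* For instance, with STORE_FLAG_VALUE == 1,
       (if_then_else COND (plus Z (const_int 4)) Z)
     becomes
       (plus Z (mult COND (const_int 4))),
     since COND contributes 4 when true and 0 when false.  */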
5755 
5756   if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
5757       && comparison_p
5758       && GET_MODE_CLASS (mode) == MODE_INT
5759       && ! side_effects_p (x))
5760     {
5761       rtx t = make_compound_operation (true_rtx, SET);
5762       rtx f = make_compound_operation (false_rtx, SET);
5763       rtx cond_op0 = XEXP (cond, 0);
5764       rtx cond_op1 = XEXP (cond, 1);
5765       enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
5766       enum machine_mode m = mode;
5767       rtx z = 0, c1 = NULL_RTX;
5768 
5769       if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
5770 	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
5771 	   || GET_CODE (t) == ASHIFT
5772 	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
5773 	  && rtx_equal_p (XEXP (t, 0), f))
5774 	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
5775 
5776       /* If an identity-zero op is commutative, check whether there
5777 	 would be a match if we swapped the operands.  */
5778       else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
5779 		|| GET_CODE (t) == XOR)
5780 	       && rtx_equal_p (XEXP (t, 1), f))
5781 	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
5782       else if (GET_CODE (t) == SIGN_EXTEND
5783 	       && (GET_CODE (XEXP (t, 0)) == PLUS
5784 		   || GET_CODE (XEXP (t, 0)) == MINUS
5785 		   || GET_CODE (XEXP (t, 0)) == IOR
5786 		   || GET_CODE (XEXP (t, 0)) == XOR
5787 		   || GET_CODE (XEXP (t, 0)) == ASHIFT
5788 		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5789 		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5790 	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5791 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5792 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5793 	       && (num_sign_bit_copies (f, GET_MODE (f))
5794 		   > (unsigned int)
5795 		     (GET_MODE_BITSIZE (mode)
5796 		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
5797 	{
5798 	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5799 	  extend_op = SIGN_EXTEND;
5800 	  m = GET_MODE (XEXP (t, 0));
5801 	}
5802       else if (GET_CODE (t) == SIGN_EXTEND
5803 	       && (GET_CODE (XEXP (t, 0)) == PLUS
5804 		   || GET_CODE (XEXP (t, 0)) == IOR
5805 		   || GET_CODE (XEXP (t, 0)) == XOR)
5806 	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5807 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5808 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5809 	       && (num_sign_bit_copies (f, GET_MODE (f))
5810 		   > (unsigned int)
5811 		     (GET_MODE_BITSIZE (mode)
5812 		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
5813 	{
5814 	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5815 	  extend_op = SIGN_EXTEND;
5816 	  m = GET_MODE (XEXP (t, 0));
5817 	}
5818       else if (GET_CODE (t) == ZERO_EXTEND
5819 	       && (GET_CODE (XEXP (t, 0)) == PLUS
5820 		   || GET_CODE (XEXP (t, 0)) == MINUS
5821 		   || GET_CODE (XEXP (t, 0)) == IOR
5822 		   || GET_CODE (XEXP (t, 0)) == XOR
5823 		   || GET_CODE (XEXP (t, 0)) == ASHIFT
5824 		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5825 		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5826 	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5827 	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5828 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5829 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5830 	       && ((nonzero_bits (f, GET_MODE (f))
5831 		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
5832 		   == 0))
5833 	{
5834 	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5835 	  extend_op = ZERO_EXTEND;
5836 	  m = GET_MODE (XEXP (t, 0));
5837 	}
5838       else if (GET_CODE (t) == ZERO_EXTEND
5839 	       && (GET_CODE (XEXP (t, 0)) == PLUS
5840 		   || GET_CODE (XEXP (t, 0)) == IOR
5841 		   || GET_CODE (XEXP (t, 0)) == XOR)
5842 	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5843 	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5844 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5845 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5846 	       && ((nonzero_bits (f, GET_MODE (f))
5847 		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
5848 		   == 0))
5849 	{
5850 	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5851 	  extend_op = ZERO_EXTEND;
5852 	  m = GET_MODE (XEXP (t, 0));
5853 	}
5854 
5855       if (z)
5856 	{
5857 	  temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
5858 						 cond_op0, cond_op1),
5859 			pc_rtx, pc_rtx, 0, 0);
5860 	  temp = simplify_gen_binary (MULT, m, temp,
5861 				      simplify_gen_binary (MULT, m, c1,
5862 							   const_true_rtx));
5863 	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
5864 	  temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
5865 
5866 	  if (extend_op != UNKNOWN)
5867 	    temp = simplify_gen_unary (extend_op, mode, temp, m);
5868 
5869 	  return temp;
5870 	}
5871     }
5872 
5873   /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5874      1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5875      negation of a single bit, we can convert this operation to a shift.  We
5876      can actually do this more generally, but it doesn't seem worth it.  */
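  /* For instance, if A is known to be 0 or 1,
     (if_then_else (ne A 0) (const_int 8) (const_int 0)) is simply
     (ashift A 3).  */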
5877 
5878   if (true_code == NE && XEXP (cond, 1) == const0_rtx
5879       && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
5880       && ((1 == nonzero_bits (XEXP (cond, 0), mode)
5881 	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
5882 	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
5883 	       == GET_MODE_BITSIZE (mode))
5884 	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
5885     return
5886       simplify_shift_const (NULL_RTX, ASHIFT, mode,
5887 			    gen_lowpart (mode, XEXP (cond, 0)), i);
5888 
5889   /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG when nonzero_bits (REG) == 8.  */
5890   if (true_code == NE && XEXP (cond, 1) == const0_rtx
5891       && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
5892       && GET_MODE (XEXP (cond, 0)) == mode
5893       && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
5894 	  == nonzero_bits (XEXP (cond, 0), mode)
5895       && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
5896     return XEXP (cond, 0);
5897 
5898   return x;
5899 }
5900 
5901 /* Simplify X, a SET expression.  Return the new expression.  */
5902 
5903 static rtx
5904 simplify_set (rtx x)
5905 {
5906   rtx src = SET_SRC (x);
5907   rtx dest = SET_DEST (x);
5908   enum machine_mode mode
5909     = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
5910   rtx other_insn;
5911   rtx *cc_use;
5912 
5913   /* (set (pc) (return)) gets written as (return).  */
5914   if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
5915     return src;
5916 
5917   /* Now that we know for sure which bits of SRC we are using, see if we can
5918      simplify the expression for the object knowing that we only need the
5919      low-order bits.  */
5920 
5921   if (GET_MODE_CLASS (mode) == MODE_INT
5922       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5923     {
5924       src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
5925       SUBST (SET_SRC (x), src);
5926     }
5927 
5928   /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5929      the comparison result and try to simplify it unless we already have used
5930      undobuf.other_insn.  */
5931   if ((GET_MODE_CLASS (mode) == MODE_CC
5932        || GET_CODE (src) == COMPARE
5933        || CC0_P (dest))
5934       && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5935       && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
5936       && COMPARISON_P (*cc_use)
5937       && rtx_equal_p (XEXP (*cc_use, 0), dest))
5938     {
5939       enum rtx_code old_code = GET_CODE (*cc_use);
5940       enum rtx_code new_code;
5941       rtx op0, op1, tmp;
5942       int other_changed = 0;
5943       enum machine_mode compare_mode = GET_MODE (dest);
5944 
5945       if (GET_CODE (src) == COMPARE)
5946 	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5947       else
5948 	op0 = src, op1 = CONST0_RTX (GET_MODE (src));
5949 
5950       tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
5951 					   op0, op1);
5952       if (!tmp)
5953 	new_code = old_code;
5954       else if (!CONSTANT_P (tmp))
5955 	{
5956 	  new_code = GET_CODE (tmp);
5957 	  op0 = XEXP (tmp, 0);
5958 	  op1 = XEXP (tmp, 1);
5959 	}
5960       else
5961 	{
5962 	  rtx pat = PATTERN (other_insn);
5963 	  undobuf.other_insn = other_insn;
5964 	  SUBST (*cc_use, tmp);
5965 
5966 	  /* Attempt to simplify CC user.  */
5967 	  if (GET_CODE (pat) == SET)
5968 	    {
5969 	      rtx new_rtx = simplify_rtx (SET_SRC (pat));
5970 	      if (new_rtx != NULL_RTX)
5971 		SUBST (SET_SRC (pat), new_rtx);
5972 	    }
5973 
5974 	  /* Convert X into a no-op move.  */
5975 	  SUBST (SET_DEST (x), pc_rtx);
5976 	  SUBST (SET_SRC (x), pc_rtx);
5977 	  return x;
5978 	}
5979 
5980       /* Simplify our comparison, if possible.  */
5981       new_code = simplify_comparison (new_code, &op0, &op1);
5982 
5983 #ifdef SELECT_CC_MODE
5984       /* If this machine has CC modes other than CCmode, check to see if we
5985 	 need to use a different CC mode here.  */
5986       if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5987 	compare_mode = GET_MODE (op0);
5988       else
5989 	compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5990 
5991 #ifndef HAVE_cc0
5992       /* If the mode changed, we have to change SET_DEST, the mode in the
5993 	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
5994 	 a hard register, just build new versions with the proper mode.  If it
5995 	 is a pseudo, we lose unless it is only time we set the pseudo, in
5996 	 is a pseudo, we lose unless it is the only time we set the pseudo, in
5997       if (compare_mode != GET_MODE (dest))
5998 	{
5999 	  if (can_change_dest_mode (dest, 0, compare_mode))
6000 	    {
6001 	      unsigned int regno = REGNO (dest);
6002 	      rtx new_dest;
6003 
6004 	      if (regno < FIRST_PSEUDO_REGISTER)
6005 		new_dest = gen_rtx_REG (compare_mode, regno);
6006 	      else
6007 		{
6008 		  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
6009 		  new_dest = regno_reg_rtx[regno];
6010 		}
6011 
6012 	      SUBST (SET_DEST (x), new_dest);
6013 	      SUBST (XEXP (*cc_use, 0), new_dest);
6014 	      other_changed = 1;
6015 
6016 	      dest = new_dest;
6017 	    }
6018 	}
6019 #endif  /* cc0 */
6020 #endif  /* SELECT_CC_MODE */
6021 
6022       /* If the code changed, we have to build a new comparison in
6023 	 undobuf.other_insn.  */
6024       if (new_code != old_code)
6025 	{
6026 	  int other_changed_previously = other_changed;
6027 	  unsigned HOST_WIDE_INT mask;
6028 	  rtx old_cc_use = *cc_use;
6029 
6030 	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
6031 					  dest, const0_rtx));
6032 	  other_changed = 1;
6033 
6034 	  /* If the only change we made was to change an EQ into an NE or
6035 	     vice versa, OP0 has only one bit that might be nonzero, and OP1
6036 	     is zero, check if changing the user of the condition code will
6037 	     produce a valid insn.  If it won't, we can keep the original code
6038 	     in that insn by surrounding our operation with an XOR.  */
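	  /* For example, if MASK is 4, OP0 is known to be either 0 or 4,
	     so (eq OP0 (const_int 0)) is equivalent to
	     (ne (xor OP0 (const_int 4)) (const_int 0)).  */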
6039 
6040 	  if (((old_code == NE && new_code == EQ)
6041 	       || (old_code == EQ && new_code == NE))
6042 	      && ! other_changed_previously && op1 == const0_rtx
6043 	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
6044 	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
6045 	    {
6046 	      rtx pat = PATTERN (other_insn), note = 0;
6047 
6048 	      if ((recog_for_combine (&pat, other_insn, &note) < 0
6049 		   && ! check_asm_operands (pat)))
6050 		{
6051 		  *cc_use = old_cc_use;
6052 		  other_changed = 0;
6053 
6054 		  op0 = simplify_gen_binary (XOR, GET_MODE (op0),
6055 					     op0, GEN_INT (mask));
6056 		}
6057 	    }
6058 	}
6059 
6060       if (other_changed)
6061 	undobuf.other_insn = other_insn;
6062 
6063       /* Otherwise, if we didn't previously have a COMPARE in the
6064 	 correct mode, we need one.  */
6065       if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
6066 	{
6067 	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
6068 	  src = SET_SRC (x);
6069 	}
6070       else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
6071 	{
6072 	  SUBST (SET_SRC (x), op0);
6073 	  src = SET_SRC (x);
6074 	}
6075       /* Otherwise, update the COMPARE if needed.  */
6076       else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
6077 	{
6078 	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
6079 	  src = SET_SRC (x);
6080 	}
6081     }
6082   else
6083     {
6084       /* Get SET_SRC in a form where we have placed back any
6085 	 compound expressions.  Then do the checks below.  */
6086       src = make_compound_operation (src, SET);
6087       SUBST (SET_SRC (x), src);
6088     }
6089 
6090   /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
6091      and X being a REG or (subreg (reg)), we may be able to convert this to
6092      (set (subreg:m2 x) (op)).
6093 
6094      We can always do this if M1 is narrower than M2 because that means that
6095      we only care about the low bits of the result.
6096 
6097      However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
6098      perform a narrower operation than requested since the high-order bits will
6099      be undefined.  On machines where it is defined, this transformation is safe
6100      as long as M1 and M2 have the same number of words.  */
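  /* For example, with M1 == QImode and M2 == SImode,
     (set R:QI (subreg:QI (plus:SI A B) 0)) can be rewritten as
     (set (subreg:SI R:QI 0) (plus:SI A B)), since only the low byte
     of the sum is live.  */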
6101 
6102   if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
6103       && !OBJECT_P (SUBREG_REG (src))
6104       && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
6105 	   / UNITS_PER_WORD)
6106 	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6107 	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
6108 #ifndef WORD_REGISTER_OPERATIONS
6109       && (GET_MODE_SIZE (GET_MODE (src))
6110 	< GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
6111 #endif
6112 #ifdef CANNOT_CHANGE_MODE_CLASS
6113       && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
6114 	    && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
6115 					 GET_MODE (SUBREG_REG (src)),
6116 					 GET_MODE (src)))
6117 #endif
6118       && (REG_P (dest)
6119 	  || (GET_CODE (dest) == SUBREG
6120 	      && REG_P (SUBREG_REG (dest)))))
6121     {
6122       SUBST (SET_DEST (x),
6123 	     gen_lowpart (GET_MODE (SUBREG_REG (src)),
6124 				      dest));
6125       SUBST (SET_SRC (x), SUBREG_REG (src));
6126 
6127       src = SET_SRC (x), dest = SET_DEST (x);
6128     }
6129 
6130 #ifdef HAVE_cc0
6131   /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
6132      in SRC.  */
6133   if (dest == cc0_rtx
6134       && GET_CODE (src) == SUBREG
6135       && subreg_lowpart_p (src)
6136       && (GET_MODE_BITSIZE (GET_MODE (src))
6137 	  < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
6138     {
6139       rtx inner = SUBREG_REG (src);
6140       enum machine_mode inner_mode = GET_MODE (inner);
6141 
6142       /* Here we make sure that the narrowed value cannot have its sign bit set.  */
6143       if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
6144 	  && (nonzero_bits (inner, inner_mode)
6145 	      < ((unsigned HOST_WIDE_INT) 1
6146 		 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
6147 	{
6148 	  SUBST (SET_SRC (x), inner);
6149 	  src = SET_SRC (x);
6150 	}
6151     }
6152 #endif
6153 
6154 #ifdef LOAD_EXTEND_OP
6155   /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
6156      would require a paradoxical subreg.  Replace the subreg with the
6157      LOAD_EXTEND_OP extension to avoid the reload otherwise required.  */
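  /* For example, on a target where LOAD_EXTEND_OP (SImode) is ZERO_EXTEND,
     (set FOO:DI (subreg:DI (mem:SI BAR) 0)) becomes
     (set FOO:DI (zero_extend:DI (mem:SI BAR))).  */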
6158 
6159   if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
6160       && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (src)))
6161       && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
6162       && SUBREG_BYTE (src) == 0
6163       && (GET_MODE_SIZE (GET_MODE (src))
6164 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
6165       && MEM_P (SUBREG_REG (src)))
6166     {
6167       SUBST (SET_SRC (x),
6168 	     gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
6169 			    GET_MODE (src), SUBREG_REG (src)));
6170 
6171       src = SET_SRC (x);
6172     }
6173 #endif
6174 
6175   /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
6176      are comparing an item known to be 0 or -1 against 0, use a logical
6177      operation instead.  Check for one of the arms being an IOR of the other
6178      arm with some value.  We compute three terms to be IOR'ed together.  In
6179      practice, at most two will be nonzero.  Then we do the IOR's.  */
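  /* For example, if C is known to be 0 or -1,
     (if_then_else (ne C (const_int 0)) X (const_int 0)) reduces to
     (and C X): TERM1 and TERM3 are zero and TERM2 is (and C X).  */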
6180 
6181   if (GET_CODE (dest) != PC
6182       && GET_CODE (src) == IF_THEN_ELSE
6183       && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
6184       && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
6185       && XEXP (XEXP (src, 0), 1) == const0_rtx
6186       && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
6187 #ifdef HAVE_conditional_move
6188       && ! can_conditionally_move_p (GET_MODE (src))
6189 #endif
6190       && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
6191 			       GET_MODE (XEXP (XEXP (src, 0), 0)))
6192 	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
6193       && ! side_effects_p (src))
6194     {
6195       rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
6196 		      ? XEXP (src, 1) : XEXP (src, 2));
6197       rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
6198 		   ? XEXP (src, 2) : XEXP (src, 1));
6199       rtx term1 = const0_rtx, term2, term3;
6200 
6201       if (GET_CODE (true_rtx) == IOR
6202 	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
6203 	term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
6204       else if (GET_CODE (true_rtx) == IOR
6205 	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
6206 	term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
6207       else if (GET_CODE (false_rtx) == IOR
6208 	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
6209 	term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
6210       else if (GET_CODE (false_rtx) == IOR
6211 	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
6212 	term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
6213 
6214       term2 = simplify_gen_binary (AND, GET_MODE (src),
6215 				   XEXP (XEXP (src, 0), 0), true_rtx);
6216       term3 = simplify_gen_binary (AND, GET_MODE (src),
6217 				   simplify_gen_unary (NOT, GET_MODE (src),
6218 						       XEXP (XEXP (src, 0), 0),
6219 						       GET_MODE (src)),
6220 				   false_rtx);
6221 
6222       SUBST (SET_SRC (x),
6223 	     simplify_gen_binary (IOR, GET_MODE (src),
6224 				  simplify_gen_binary (IOR, GET_MODE (src),
6225 						       term1, term2),
6226 				  term3));
6227 
6228       src = SET_SRC (x);
6229     }
6230 
6231   /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
6232      whole thing fail.  */
6233   if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
6234     return src;
6235   else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
6236     return dest;
6237   else
6238     /* Convert this into a field assignment operation, if possible.  */
6239     return make_field_assignment (x);
6240 }
6241 
6242 /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
6243    result.  */
6244 
6245 static rtx
6246 simplify_logical (rtx x)
6247 {
6248   enum machine_mode mode = GET_MODE (x);
6249   rtx op0 = XEXP (x, 0);
6250   rtx op1 = XEXP (x, 1);
6251 
6252   switch (GET_CODE (x))
6253     {
6254     case AND:
6255       /* We can call simplify_and_const_int only if we don't lose
6256 	 any (sign) bits when converting INTVAL (op1) to
6257 	 "unsigned HOST_WIDE_INT".  */
6258       if (CONST_INT_P (op1)
6259 	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6260 	      || INTVAL (op1) > 0))
6261 	{
6262 	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
6263 	  if (GET_CODE (x) != AND)
6264 	    return x;
6265 
6266 	  op0 = XEXP (x, 0);
6267 	  op1 = XEXP (x, 1);
6268 	}
6269 
6270       /* If we have any of (and (ior A B) C) or (and (xor A B) C),
6271 	 apply the distributive law and then the inverse distributive
6272 	 law to see if things simplify.  */
6273       if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
6274 	{
6275 	  rtx result = distribute_and_simplify_rtx (x, 0);
6276 	  if (result)
6277 	    return result;
6278 	}
6279       if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
6280 	{
6281 	  rtx result = distribute_and_simplify_rtx (x, 1);
6282 	  if (result)
6283 	    return result;
6284 	}
6285       break;
6286 
6287     case IOR:
6288       /* If we have (ior (and A B) C), apply the distributive law and then
6289 	 the inverse distributive law to see if things simplify.  */
6290 
6291       if (GET_CODE (op0) == AND)
6292 	{
6293 	  rtx result = distribute_and_simplify_rtx (x, 0);
6294 	  if (result)
6295 	    return result;
6296 	}
6297 
6298       if (GET_CODE (op1) == AND)
6299 	{
6300 	  rtx result = distribute_and_simplify_rtx (x, 1);
6301 	  if (result)
6302 	    return result;
6303 	}
6304       break;
6305 
6306     default:
6307       gcc_unreachable ();
6308     }
6309 
6310   return x;
6311 }
6312 
6313 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
6314    operations" because they can be replaced with two more basic operations.
6315    ZERO_EXTEND is also considered "compound" because it can be replaced with
6316    an AND operation, which is simpler, though only one operation.
6317 
6318    The function expand_compound_operation is called with an rtx expression
6319    and will convert it to the appropriate shifts and AND operations,
6320    simplifying at each stage.
6321 
6322    The function make_compound_operation is called to convert an expression
6323    consisting of shifts and ANDs into the equivalent compound expression.
6324    It is the inverse of this function, loosely speaking.  */
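/* For example, on a target where BITS_BIG_ENDIAN is 0 and SImode is 32
   bits wide, (sign_extract:SI X (const_int 8) (const_int 4)) expands to
   (ashiftrt:SI (ashift:SI X (const_int 20)) (const_int 24)), modulo
   further simplification of the shifts.  */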
6325 
6326 static rtx
6327 expand_compound_operation (rtx x)
6328 {
6329   unsigned HOST_WIDE_INT pos = 0, len;
6330   int unsignedp = 0;
6331   unsigned int modewidth;
6332   rtx tem;
6333 
6334   switch (GET_CODE (x))
6335     {
6336     case ZERO_EXTEND:
6337       unsignedp = 1;
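
      /* ... fall through ...  */
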
6338     case SIGN_EXTEND:
6339       /* We can't necessarily use a const_int for a multiword mode;
6340 	 it depends on implicitly extending the value.
6341 	 Since we don't know the right way to extend it,
6342 	 we can't tell whether the implicit way is right.
6343 
6344 	 Even for a mode that is no wider than a const_int,
6345 	 we can't win, because we need to sign extend one of its bits through
6346 	 the rest of it, and we don't know which bit.  */
6347       if (CONST_INT_P (XEXP (x, 0)))
6348 	return x;
6349 
6350       /* Return if (subreg:MODE FROM 0) is not a safe replacement for
6351 	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is safe for
6352 	 any MEM because (SUBREG (MEM...)) is guaranteed to cause the MEM to
6353 	 be reloaded.  If not for that, MEMs would very rarely be safe.
6354 
6355 	 Reject MODEs bigger than a word, because we might not be able
6356 	 to reference a two-register group starting with an arbitrary register
6357 	 (and currently gen_lowpart might crash for a SUBREG).  */
6358 
6359       if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
6360 	return x;
6361 
6362       /* Reject MODEs that aren't scalar integers because turning vector
6363 	 or complex modes into shifts causes problems.  */
6364 
6365       if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
6366 	return x;
6367 
6368       len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
6369       /* If the inner object has VOIDmode (the only way this can happen
6370 	 is if it is an ASM_OPERANDS), we can't do anything since we don't
6371 	 know how much masking to do.  */
6372       if (len == 0)
6373 	return x;
6374 
6375       break;
6376 
6377     case ZERO_EXTRACT:
6378       unsignedp = 1;
6379 
6380       /* ... fall through ...  */
6381 
6382     case SIGN_EXTRACT:
6383       /* If the operand is a CLOBBER, just return it.  */
6384       if (GET_CODE (XEXP (x, 0)) == CLOBBER)
6385 	return XEXP (x, 0);
6386 
6387       if (!CONST_INT_P (XEXP (x, 1))
6388 	  || !CONST_INT_P (XEXP (x, 2))
6389 	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
6390 	return x;
6391 
6392       /* Reject MODEs that aren't scalar integers because turning vector
6393 	 or complex modes into shifts causes problems.  */
6394 
6395       if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
6396 	return x;
6397 
6398       len = INTVAL (XEXP (x, 1));
6399       pos = INTVAL (XEXP (x, 2));
6400 
6401       /* This should stay within the object being extracted, fail otherwise.  */
6402       if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
6403 	return x;
6404 
6405       if (BITS_BIG_ENDIAN)
6406 	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
6407 
6408       break;
6409 
6410     default:
6411       return x;
6412     }
6413   /* Convert sign extension to zero extension, if we know that the high
6414      bit is not set, as this is easier to optimize.  It will be converted
6415      back to a cheaper alternative in make_extraction.  */
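  /* For example, (sign_extend:SI X:QI) becomes (zero_extend:SI X:QI)
     when nonzero_bits shows that the top bit of X is clear.  */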
6416   if (GET_CODE (x) == SIGN_EXTEND
6417       && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6418 	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6419 		& ~(((unsigned HOST_WIDE_INT)
6420 		      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
6421 		     >> 1))
6422 	       == 0)))
6423     {
6424       rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
6425       rtx temp2 = expand_compound_operation (temp);
6426 
6427       /* Make sure this is a profitable operation.  */
6428       if (rtx_cost (x, SET, optimize_this_for_speed_p)
6429           > rtx_cost (temp2, SET, optimize_this_for_speed_p))
6430        return temp2;
6431       else if (rtx_cost (x, SET, optimize_this_for_speed_p)
6432                > rtx_cost (temp, SET, optimize_this_for_speed_p))
6433        return temp;
6434       else
6435        return x;
6436     }
6437 
6438   /* We can optimize some special cases of ZERO_EXTEND.  */
6439   if (GET_CODE (x) == ZERO_EXTEND)
6440     {
6441       /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
6442 	 know that the last value didn't have any inappropriate bits
6443 	 set.  */
6444       if (GET_CODE (XEXP (x, 0)) == TRUNCATE
6445 	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
6446 	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6447 	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
6448 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6449 	return XEXP (XEXP (x, 0), 0);
6450 
6451       /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
6452       if (GET_CODE (XEXP (x, 0)) == SUBREG
6453 	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6454 	  && subreg_lowpart_p (XEXP (x, 0))
6455 	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6456 	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
6457 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6458 	return SUBREG_REG (XEXP (x, 0));
6459 
6460       /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
6461 	 is a comparison and STORE_FLAG_VALUE permits.  This is like
6462 	 the first case, but it works even when GET_MODE (x) is larger
6463 	 than HOST_WIDE_INT.  */
6464       if (GET_CODE (XEXP (x, 0)) == TRUNCATE
6465 	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
6466 	  && COMPARISON_P (XEXP (XEXP (x, 0), 0))
6467 	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6468 	      <= HOST_BITS_PER_WIDE_INT)
6469 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
6470 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6471 	return XEXP (XEXP (x, 0), 0);
6472 
6473       /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
6474       if (GET_CODE (XEXP (x, 0)) == SUBREG
6475 	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6476 	  && subreg_lowpart_p (XEXP (x, 0))
6477 	  && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
6478 	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6479 	      <= HOST_BITS_PER_WIDE_INT)
6480 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
6481 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6482 	return SUBREG_REG (XEXP (x, 0));
6483 
6484     }
6485 
6486   /* If we reach here, we want to return a pair of shifts.  The inner
6487      shift is a left shift of BITSIZE - POS - LEN bits.  The outer
6488      shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
6489      logical depending on the value of UNSIGNEDP.
6490 
6491      If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
6492      converted into an AND of a shift.
6493 
6494      We must check for the case where the left shift would have a negative
6495      count.  This can happen in a case like (x >> 31) & 255 on machines
6496      that can't shift by a constant.  On those machines, we would first
6497      combine the shift with the AND to produce a variable-position
6498      extraction.  Then the constant of 31 would be substituted in to produce
6499      such a position.  */
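  /* For example, a zero_extract of LEN = 8 bits at POS = 4 from a 32-bit
     SImode value X typically ends up as
     (and:SI (lshiftrt:SI X (const_int 4)) (const_int 255)).  */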
6500 
6501   modewidth = GET_MODE_BITSIZE (GET_MODE (x));
6502   if (modewidth + len >= pos)
6503     {
6504       enum machine_mode mode = GET_MODE (x);
6505       tem = gen_lowpart (mode, XEXP (x, 0));
6506       if (!tem || GET_CODE (tem) == CLOBBER)
6507 	return x;
6508       tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
6509 				  tem, modewidth - pos - len);
6510       tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
6511 				  mode, tem, modewidth - len);
6512     }
6513   else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
6514     tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
6515 				  simplify_shift_const (NULL_RTX, LSHIFTRT,
6516 							GET_MODE (x),
6517 							XEXP (x, 0), pos),
6518 				  ((HOST_WIDE_INT) 1 << len) - 1);
6519   else
6520     /* Any other cases we can't handle.  */
6521     return x;
6522 
6523   /* If we couldn't do this for some reason, return the original
6524      expression.  */
6525   if (GET_CODE (tem) == CLOBBER)
6526     return x;
6527 
6528   return tem;
6529 }
6530 
6531 /* X is a SET which contains an assignment of one object into
6532    a part of another (such as a bit-field assignment, STRICT_LOW_PART,
6533    or certain SUBREGS). If possible, convert it into a series of
6534    logical operations.
6535 
6536    We half-heartedly support variable positions, but do not at all
6537    support variable lengths.  */
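/* For example, on a target where BITS_BIG_ENDIAN is 0,
   (set (zero_extract:SI R (const_int 4) (const_int 8)) V)
   can be rewritten, modulo canonicalization, as
   (set R (ior:SI (and:SI R (const_int -3841))
		  (ashift:SI (and:SI V (const_int 15)) (const_int 8)))),
   -3841 being the complement of the field mask 15 shifted into place.  */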
6538 
6539 static const_rtx
6540 expand_field_assignment (const_rtx x)
6541 {
6542   rtx inner;
6543   rtx pos;			/* Always counts from low bit.  */
6544   int len;
6545   rtx mask, cleared, masked;
6546   enum machine_mode compute_mode;
6547 
6548   /* Loop until we find something we can't simplify.  */
6549   while (1)
6550     {
6551       if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6552 	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
6553 	{
6554 	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
6555 	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
6556 	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
6557 	}
6558       else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
6559 	       && CONST_INT_P (XEXP (SET_DEST (x), 1)))
6560 	{
6561 	  inner = XEXP (SET_DEST (x), 0);
6562 	  len = INTVAL (XEXP (SET_DEST (x), 1));
6563 	  pos = XEXP (SET_DEST (x), 2);
6564 
6565 	  /* A constant position should stay within the width of INNER.  */
6566 	  if (CONST_INT_P (pos)
6567 	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
6568 	    break;
6569 
6570 	  if (BITS_BIG_ENDIAN)
6571 	    {
6572 	      if (CONST_INT_P (pos))
6573 		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
6574 			       - INTVAL (pos));
6575 	      else if (GET_CODE (pos) == MINUS
6576 		       && CONST_INT_P (XEXP (pos, 1))
6577 		       && (INTVAL (XEXP (pos, 1))
6578 			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
6579 		/* If position is ADJUST - X, new position is X.  */
6580 		pos = XEXP (pos, 0);
6581 	      else
6582 		pos = simplify_gen_binary (MINUS, GET_MODE (pos),
6583 					   GEN_INT (GET_MODE_BITSIZE (
6584 						    GET_MODE (inner))
6585 						    - len),
6586 					   pos);
6587 	    }
6588 	}
6589 
6590       /* A SUBREG between two modes that occupy the same numbers of words
6591 	 can be done by moving the SUBREG to the source.  */
6592       else if (GET_CODE (SET_DEST (x)) == SUBREG
6593 	       /* We need SUBREGs to compute nonzero_bits properly.  */
6594 	       && nonzero_sign_valid
6595 	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
6596 		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6597 		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
6598 			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
6599 	{
6600 	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
6601 			   gen_lowpart
6602 			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
6603 			    SET_SRC (x)));
6604 	  continue;
6605 	}
6606       else
6607 	break;
6608 
6609       while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6610 	inner = SUBREG_REG (inner);
6611 
6612       compute_mode = GET_MODE (inner);
6613 
6614       /* Don't attempt bitwise arithmetic on non-scalar integer modes.  */
6615       if (! SCALAR_INT_MODE_P (compute_mode))
6616 	{
6617 	  enum machine_mode imode;
6618 
6619 	  /* Don't do anything for vector or complex integral types.  */
6620 	  if (! FLOAT_MODE_P (compute_mode))
6621 	    break;
6622 
6623 	  /* Try to find an integral mode to pun with.  */
6624 	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
6625 	  if (imode == BLKmode)
6626 	    break;
6627 
6628 	  compute_mode = imode;
6629 	  inner = gen_lowpart (imode, inner);
6630 	}
6631 
6632       /* Compute a mask of LEN bits, if we can do this on the host machine.  */
6633       if (len >= HOST_BITS_PER_WIDE_INT)
6634 	break;
6635 
6636       /* Now compute the equivalent expression.  Make a copy of INNER
6637 	 for the SET_DEST in case it is a MEM into which we will substitute;
6638 	 we don't want shared RTL in that case.  */
6639       mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
6640       cleared = simplify_gen_binary (AND, compute_mode,
6641 				     simplify_gen_unary (NOT, compute_mode,
6642 				       simplify_gen_binary (ASHIFT,
6643 							    compute_mode,
6644 							    mask, pos),
6645 				       compute_mode),
6646 				     inner);
6647       masked = simplify_gen_binary (ASHIFT, compute_mode,
6648 				    simplify_gen_binary (
6649 				      AND, compute_mode,
6650 				      gen_lowpart (compute_mode, SET_SRC (x)),
6651 				      mask),
6652 				    pos);
6653 
6654       x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
6655 		       simplify_gen_binary (IOR, compute_mode,
6656 					    cleared, masked));
6657     }
6658 
6659   return x;
6660 }
6661 
6662 /* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
6663    it is an RTX that represents a variable starting position; otherwise,
6664    POS is the (constant) starting bit position (counted from the LSB).
6665 
6666    UNSIGNEDP is nonzero for an unsigned reference and zero for a
6667    signed reference.
6668 
6669    IN_DEST is nonzero if this is a reference in the destination of a
6670    SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
6671    a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
6672    be used.
6673 
6674    IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
6675    ZERO_EXTRACT should be built even for bits starting at bit 0.
6676 
6677    MODE is the desired mode of the result (if IN_DEST == 0).
6678 
6679    The result is an RTX for the extraction or NULL_RTX if the target
6680    can't handle it.  */
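/* For example, make_extraction (SImode, X, 3, NULL_RTX, 4, 1, 0, 0)
   typically yields (zero_extract:SI X (const_int 4) (const_int 3)),
   the extraction of 4 unsigned bits starting at bit 3.  */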
6681 
6682 static rtx
6683 make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
6684 		 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
6685 		 int in_dest, int in_compare)
6686 {
6687   /* This mode describes the size of the storage area
6688      to fetch the overall value from.  Within that, we
6689      ignore the POS lowest bits, etc.  */
6690   enum machine_mode is_mode = GET_MODE (inner);
6691   enum machine_mode inner_mode;
6692   enum machine_mode wanted_inner_mode;
6693   enum machine_mode wanted_inner_reg_mode = word_mode;
6694   enum machine_mode pos_mode = word_mode;
6695   enum machine_mode extraction_mode = word_mode;
6696   enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
6697   rtx new_rtx = 0;
6698   rtx orig_pos_rtx = pos_rtx;
6699   HOST_WIDE_INT orig_pos;
6700 
6701   if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6702     {
6703       /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
6704 	 consider just the QI as the memory to extract from.
6705 	 The subreg adds or removes high bits; its mode is
6706 	 irrelevant to the meaning of this extraction,
6707 	 since POS and LEN count from the lsb.  */
6708       if (MEM_P (SUBREG_REG (inner)))
6709 	is_mode = GET_MODE (SUBREG_REG (inner));
6710       inner = SUBREG_REG (inner);
6711     }
6712   else if (GET_CODE (inner) == ASHIFT
6713 	   && CONST_INT_P (XEXP (inner, 1))
6714 	   && pos_rtx == 0 && pos == 0
6715 	   && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
6716     {
6717       /* We're extracting the least significant bits of an rtx
6718 	 (ashift X (const_int C)), where LEN > C.  Extract the
6719 	 least significant (LEN - C) bits of X, giving an rtx
6720 	 whose mode is MODE, then shift it left C times.  */
6721       new_rtx = make_extraction (mode, XEXP (inner, 0),
6722 			     0, 0, len - INTVAL (XEXP (inner, 1)),
6723 			     unsignedp, in_dest, in_compare);
6724       if (new_rtx != 0)
6725 	return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
6726     }
6727 
6728   inner_mode = GET_MODE (inner);
6729 
6730   if (pos_rtx && CONST_INT_P (pos_rtx))
6731     pos = INTVAL (pos_rtx), pos_rtx = 0;
6732 
6733   /* See if this can be done without an extraction.  We never can if the
6734      width of the field is not the same as that of some integer mode. For
6735      registers, we can only avoid the extraction if the position is at the
6736      low-order bit and this is either not in the destination or we have the
6737      appropriate STRICT_LOW_PART operation available.
6738 
6739      For MEM, we can avoid an extract if the field starts on an appropriate
6740      boundary and we can change the mode of the memory reference.  */
6741 
6742   if (tmode != BLKmode
6743       && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6744 	   && !MEM_P (inner)
6745 	   && (inner_mode == tmode
6746 	       || !REG_P (inner)
6747 	       || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
6748 					 GET_MODE_BITSIZE (inner_mode))
6749 	       || reg_truncated_to_mode (tmode, inner))
6750 	   && (! in_dest
6751 	       || (REG_P (inner)
6752 		   && have_insn_for (STRICT_LOW_PART, tmode))))
6753 	  || (MEM_P (inner) && pos_rtx == 0
6754 	      && (pos
6755 		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6756 		     : BITS_PER_UNIT)) == 0
6757 	      /* We can't do this if we are widening INNER_MODE (it
6758 		 may not be aligned, for one thing).  */
6759 	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6760 	      && (inner_mode == tmode
6761 		  || (! mode_dependent_address_p (XEXP (inner, 0))
6762 		      && ! MEM_VOLATILE_P (inner))))))
6763     {
6764       /* If INNER is a MEM, make a new MEM that encompasses just the desired
6765 	 field.  If the original and current mode are the same, we need not
6766 	 adjust the offset.  Otherwise, we do if bytes big endian.
6767 
6768 	 If INNER is not a MEM, get a piece consisting of just the field
6769 	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
6770 
6771       if (MEM_P (inner))
6772 	{
6773 	  HOST_WIDE_INT offset;
6774 
6775 	  /* POS counts from lsb, but make OFFSET count in memory order.  */
6776 	  if (BYTES_BIG_ENDIAN)
6777 	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6778 	  else
6779 	    offset = pos / BITS_PER_UNIT;
6780 
6781 	  new_rtx = adjust_address_nv (inner, tmode, offset);
6782 	}
6783       else if (REG_P (inner))
6784 	{
6785 	  if (tmode != inner_mode)
6786 	    {
6787 	      /* We can't call gen_lowpart in a DEST since we
6788 		 always want a SUBREG (see below) and it would sometimes
6789 		 return a new hard register.  */
6790 	      if (pos || in_dest)
6791 		{
6792 		  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6793 
6794 		  if (WORDS_BIG_ENDIAN
6795 		      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6796 		    final_word = ((GET_MODE_SIZE (inner_mode)
6797 				   - GET_MODE_SIZE (tmode))
6798 				  / UNITS_PER_WORD) - final_word;
6799 
6800 		  final_word *= UNITS_PER_WORD;
6801 		  if (BYTES_BIG_ENDIAN
6802 		      && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6803 		    final_word += (GET_MODE_SIZE (inner_mode)
6804 				   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6805 
6806 		  /* Avoid creating invalid subregs, for example when
6807 		     simplifying (x>>32)&255.  */
6808 		  if (!validate_subreg (tmode, inner_mode, inner, final_word))
6809 		    return NULL_RTX;
6810 
6811 		  new_rtx = gen_rtx_SUBREG (tmode, inner, final_word);
6812 		}
6813 	      else
6814 		new_rtx = gen_lowpart (tmode, inner);
6815 	    }
6816 	  else
6817 	    new_rtx = inner;
6818 	}
6819       else
6820 	new_rtx = force_to_mode (inner, tmode,
6821 			     len >= HOST_BITS_PER_WIDE_INT
6822 			     ? ~(unsigned HOST_WIDE_INT) 0
6823 			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6824 			     0);
6825 
6826       /* If this extraction is going into the destination of a SET,
6827 	 make a STRICT_LOW_PART unless we made a MEM.  */
6828 
6829       if (in_dest)
6830 	return (MEM_P (new_rtx) ? new_rtx
6831 		: (GET_CODE (new_rtx) != SUBREG
6832 		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
6833 		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));
6834 
6835       if (mode == tmode)
6836 	return new_rtx;
6837 
6838       if (CONST_INT_P (new_rtx)
6839 	  || GET_CODE (new_rtx) == CONST_DOUBLE)
6840 	return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6841 					 mode, new_rtx, tmode);
6842 
6843       /* If we know that no extraneous bits are set, and that the high
6844 	 bit is not set, convert the extraction to the cheaper of
6845 	 sign and zero extension, which are equivalent in these cases.  */
6846       if (flag_expensive_optimizations
6847 	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6848 	      && ((nonzero_bits (new_rtx, tmode)
6849 		   & ~(((unsigned HOST_WIDE_INT)
6850 			GET_MODE_MASK (tmode))
6851 		       >> 1))
6852 		  == 0)))
6853 	{
6854 	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
6855 	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);
6856 
6857 	  /* Prefer ZERO_EXTENSION, since it gives more information to
6858 	     backends.  */
6859 	  if (rtx_cost (temp, SET, optimize_this_for_speed_p)
6860 	      <= rtx_cost (temp1, SET, optimize_this_for_speed_p))
6861 	    return temp;
6862 	  return temp1;
6863 	}
6864 
6865       /* Otherwise, sign- or zero-extend unless we already are in the
6866 	 proper mode.  */
6867 
6868       return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6869 			     mode, new_rtx));
6870     }
6871 
6872   /* Unless this is a COMPARE or we have a funny memory reference,
6873      don't do anything with zero-extending field extracts starting at
6874      the low-order bit since they are simple AND operations.  */
6875   if (pos_rtx == 0 && pos == 0 && ! in_dest
6876       && ! in_compare && unsignedp)
6877     return 0;
6878 
6879   /* If INNER is a MEM, reject this if we would be spanning bytes or
6880      if the position is not a constant and the length is not 1.  In all
6881      other cases, we would only be going outside our object in cases where
6882      an original shift would have been undefined.  */
6883   if (MEM_P (inner)
6884       && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6885 	  || (pos_rtx != 0 && len != 1)))
6886     return 0;
6887 
6888   /* Get the mode to use should INNER not be a MEM, the mode for the position,
6889      and the mode for the result.  */
6890   if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6891     {
6892       wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6893       pos_mode = mode_for_extraction (EP_insv, 2);
6894       extraction_mode = mode_for_extraction (EP_insv, 3);
6895     }
6896 
6897   if (! in_dest && unsignedp
6898       && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6899     {
6900       wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6901       pos_mode = mode_for_extraction (EP_extzv, 3);
6902       extraction_mode = mode_for_extraction (EP_extzv, 0);
6903     }
6904 
6905   if (! in_dest && ! unsignedp
6906       && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6907     {
6908       wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6909       pos_mode = mode_for_extraction (EP_extv, 3);
6910       extraction_mode = mode_for_extraction (EP_extv, 0);
6911     }
6912 
6913   /* Never narrow an object, since that might not be safe.  */
6914 
6915   if (mode != VOIDmode
6916       && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6917     extraction_mode = mode;
6918 
6919   if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6920       && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6921     pos_mode = GET_MODE (pos_rtx);
6922 
6923   /* If this is not from memory, the desired mode is the preferred mode
6924      for an extraction pattern's first input operand, or word_mode if there
6925      is none.  */
6926   if (!MEM_P (inner))
6927     wanted_inner_mode = wanted_inner_reg_mode;
6928   else
6929     {
6930       /* Be careful not to go beyond the extracted object and maintain the
6931 	 natural alignment of the memory.  */
6932       wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
6933       while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
6934 	     > GET_MODE_BITSIZE (wanted_inner_mode))
6935 	{
6936 	  wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
6937 	  gcc_assert (wanted_inner_mode != VOIDmode);
6938 	}
6939 
6940       /* If we have to change the mode of memory and cannot, the desired mode
6941 	 is EXTRACTION_MODE.  */
6942       if (inner_mode != wanted_inner_mode
6943 	  && (mode_dependent_address_p (XEXP (inner, 0))
6944 	      || MEM_VOLATILE_P (inner)
6945 	      || pos_rtx))
6946 	wanted_inner_mode = extraction_mode;
6947     }
6948 
6949   orig_pos = pos;
6950 
6951   if (BITS_BIG_ENDIAN)
6952     {
6953       /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6954 	 BITS_BIG_ENDIAN style.  If position is constant, compute new
6955 	 position.  Otherwise, build subtraction.
6956 	 Note that POS is relative to the mode of the original argument.
6957 	 If it's a MEM we need to recompute POS relative to that.
6958 	 However, if we're extracting from (or inserting into) a register,
6959 	 we want to recompute POS relative to wanted_inner_mode.  */
6960       int width = (MEM_P (inner)
6961 		   ? GET_MODE_BITSIZE (is_mode)
6962 		   : GET_MODE_BITSIZE (wanted_inner_mode));
6963 
6964       if (pos_rtx == 0)
6965 	pos = width - len - pos;
6966       else
6967 	pos_rtx
6968 	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6969       /* POS may be less than 0 now, but we check for that below.
6970 	 Note that it can only be less than 0 if !MEM_P (inner).  */
6971     }
6972 
6973   /* If INNER has a wider mode, and this is a constant extraction, try to
6974      make it smaller and adjust the byte to point to the byte containing
6975      the value.  */
6976   if (wanted_inner_mode != VOIDmode
6977       && inner_mode != wanted_inner_mode
6978       && ! pos_rtx
6979       && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6980       && MEM_P (inner)
6981       && ! mode_dependent_address_p (XEXP (inner, 0))
6982       && ! MEM_VOLATILE_P (inner))
6983     {
6984       int offset = 0;
6985 
6986       /* The computations below will be correct if the machine is big
6987 	 endian in both bits and bytes or little endian in bits and bytes.
6988 	 If it is mixed, we must adjust.  */
6989 
6990       /* If bytes are big endian and we had a paradoxical SUBREG, we must
6991 	 adjust OFFSET to compensate.  */
6992       if (BYTES_BIG_ENDIAN
6993 	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6994 	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6995 
6996       /* We can now move to the desired byte.  */
6997       offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
6998 		* GET_MODE_SIZE (wanted_inner_mode);
6999       pos %= GET_MODE_BITSIZE (wanted_inner_mode);
7000 
7001       if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
7002 	  && is_mode != wanted_inner_mode)
7003 	offset = (GET_MODE_SIZE (is_mode)
7004 		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
7005 
7006       inner = adjust_address_nv (inner, wanted_inner_mode, offset);
7007     }
7008 
7009   /* If INNER is not memory, get it into the proper mode.  If we are changing
7010      its mode, POS must be a constant and smaller than the size of the new
7011      mode.  */
7012   else if (!MEM_P (inner))
7013     {
7014       /* On the LHS, don't create paradoxical subregs implicitly truncating
7015 	 the register unless TRULY_NOOP_TRUNCATION.  */
7016       if (in_dest
7017 	  && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (inner)),
7018 				     GET_MODE_BITSIZE (wanted_inner_mode)))
7019 	return NULL_RTX;
7020 
7021       if (GET_MODE (inner) != wanted_inner_mode
7022 	  && (pos_rtx != 0
7023 	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
7024 	return NULL_RTX;
7025 
7026       if (orig_pos < 0)
7027 	return NULL_RTX;
7028 
7029       inner = force_to_mode (inner, wanted_inner_mode,
7030 			     pos_rtx
7031 			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
7032 			     ? ~(unsigned HOST_WIDE_INT) 0
7033 			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
7034 				<< orig_pos),
7035 			     0);
7036     }
7037 
7038   /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
7039      have to zero extend.  Otherwise, we can just use a SUBREG.  */
7040   if (pos_rtx != 0
7041       && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
7042     {
7043       rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
7044 
7045       /* If we know that no extraneous bits are set, and that the high
7046 	 bit is not set, convert the extraction to the cheaper one, either
7047 	 SIGN_EXTEND or ZERO_EXTEND, which are equivalent in these
7048 	 cases.  */
7049       if (flag_expensive_optimizations
7050 	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
7051 	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
7052 		   & ~(((unsigned HOST_WIDE_INT)
7053 			GET_MODE_MASK (GET_MODE (pos_rtx)))
7054 		       >> 1))
7055 		  == 0)))
7056 	{
7057 	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
7058 
7059 	  /* Prefer ZERO_EXTENSION, since it gives more information to
7060 	     backends.  */
7061 	  if (rtx_cost (temp1, SET, optimize_this_for_speed_p)
7062 	      < rtx_cost (temp, SET, optimize_this_for_speed_p))
7063 	    temp = temp1;
7064 	}
7065       pos_rtx = temp;
7066     }
7067   else if (pos_rtx != 0
7068 	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
7069     pos_rtx = gen_lowpart (pos_mode, pos_rtx);
7070 
7071   /* Make POS_RTX unless we already have it and it is correct.  If we don't
7072      have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
7073      be a CONST_INT.  */
7074   if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
7075     pos_rtx = orig_pos_rtx;
7076 
7077   else if (pos_rtx == 0)
7078     pos_rtx = GEN_INT (pos);
7079 
7080   /* Make the required operation.  See if we can use existing rtx.  */
7081   new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
7082 			 extraction_mode, inner, GEN_INT (len), pos_rtx);
7083   if (! in_dest)
7084     new_rtx = gen_lowpart (mode, new_rtx);
7085 
7086   return new_rtx;
7087 }
7088 
7089 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
7090    with any other operations in X.  Return X without that shift if so.  */
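/* For example, with COUNT == 3,
   (plus:SI (ashift:SI X (const_int 3)) (const_int 24)) yields
   (plus:SI X (const_int 3)), since shifting that result left by 3
   reconstructs the original expression.  */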
7091 
7092 static rtx
7093 extract_left_shift (rtx x, int count)
7094 {
7095   enum rtx_code code = GET_CODE (x);
7096   enum machine_mode mode = GET_MODE (x);
7097   rtx tem;
7098 
7099   switch (code)
7100     {
7101     case ASHIFT:
7102       /* This is the shift itself.  If it is wide enough, we will return
7103 	 either the value being shifted if the shift count is equal to
7104 	 COUNT or a shift for the difference.  */
7105       if (CONST_INT_P (XEXP (x, 1))
7106 	  && INTVAL (XEXP (x, 1)) >= count)
7107 	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
7108 				     INTVAL (XEXP (x, 1)) - count);
7109       break;
7110 
7111     case NEG:  case NOT:
7112       if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
7113 	return simplify_gen_unary (code, mode, tem, mode);
7114 
7115       break;
7116 
7117     case PLUS:  case IOR:  case XOR:  case AND:
7118       /* If we can safely shift this constant and we find the inner shift,
7119 	 make a new operation.  */
7120       if (CONST_INT_P (XEXP (x, 1))
7121 	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
7122 	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
7123 	return simplify_gen_binary (code, mode, tem,
7124 				    GEN_INT (INTVAL (XEXP (x, 1)) >> count));
7125 
7126       break;
7127 
7128     default:
7129       break;
7130     }
7131 
7132   return 0;
7133 }
7134 
7135 /* Look at the expression rooted at X.  Look for expressions
7136    equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
7137    Form these expressions.
7138 
7139    Return the new rtx, usually just X.
7140 
7141    Also, for machines like the VAX that don't have logical shift insns,
7142    try to convert logical to arithmetic shift operations in cases where
7143    they are equivalent.  This undoes the canonicalizations to logical
7144    shifts done elsewhere.
7145 
7146    We try, as much as possible, to re-use rtl expressions to save memory.
7147 
7148    IN_CODE says what kind of expression we are processing.  Normally, it is
7149    SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
7150    SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
7151    or a COMPARE against zero, it is COMPARE.  */
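/* For example, (and:SI (lshiftrt:SI X (const_int 3)) (const_int 15))
   is typically rewritten as (zero_extract:SI X (const_int 4)
   (const_int 3)), the extraction of 4 bits starting at bit 3.  */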
7152 
7153 static rtx
7154 make_compound_operation (rtx x, enum rtx_code in_code)
7155 {
7156   enum rtx_code code = GET_CODE (x);
7157   enum machine_mode mode = GET_MODE (x);
7158   int mode_width = GET_MODE_BITSIZE (mode);
7159   rtx rhs, lhs;
7160   enum rtx_code next_code;
7161   int i, j;
7162   rtx new_rtx = 0;
7163   rtx tem;
7164   const char *fmt;
7165 
7166   /* Select the code to be used in recursive calls.  Once we are inside an
7167      address, we stay there.  If we have a comparison, set to COMPARE,
7168      but once inside, go back to our default of SET.  */
7169 
7170   next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
7171 	       : ((code == COMPARE || COMPARISON_P (x))
7172 		  && XEXP (x, 1) == const0_rtx) ? COMPARE
7173 	       : in_code == COMPARE ? SET : in_code);
7174 
7175   /* Process depending on the code of this operation.  If NEW_RTX is set
7176      nonzero, it will be returned.  */
7177 
7178   switch (code)
7179     {
7180     case ASHIFT:
7181       /* Convert shifts by constants into multiplications if inside
7182 	 an address.  */
7183       if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
7184 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7185 	  && INTVAL (XEXP (x, 1)) >= 0)
7186 	{
7187 	  new_rtx = make_compound_operation (XEXP (x, 0), next_code);
7188 	  new_rtx = gen_rtx_MULT (mode, new_rtx,
7189 			      GEN_INT ((HOST_WIDE_INT) 1
7190 				       << INTVAL (XEXP (x, 1))));
7191 	}
7192       break;
7193 
7194     case AND:
7195       /* If the second operand is not a constant, we can't do anything
7196 	 with it.  */
7197       if (!CONST_INT_P (XEXP (x, 1)))
7198 	break;
7199 
7200       /* If the constant is a power of two minus one and the first operand
7201 	 is a logical right shift, make an extraction.  */
7202       if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7203 	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7204 	{
7205 	  new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
7206 	  new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1), i, 1,
7207 				 0, in_code == COMPARE);
7208 	}
7209 
7210       /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
7211       else if (GET_CODE (XEXP (x, 0)) == SUBREG
7212 	       && subreg_lowpart_p (XEXP (x, 0))
7213 	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
7214 	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7215 	{
7216 	  new_rtx = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
7217 					 next_code);
7218 	  new_rtx = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new_rtx, 0,
7219 				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
7220 				 0, in_code == COMPARE);
7221 	}
7222       /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
7223       else if ((GET_CODE (XEXP (x, 0)) == XOR
7224 		|| GET_CODE (XEXP (x, 0)) == IOR)
7225 	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
7226 	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
7227 	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7228 	{
7229 	  /* Apply the distributive law, and then try to make extractions.  */
7230 	  new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
7231 				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
7232 					     XEXP (x, 1)),
7233 				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
7234 					     XEXP (x, 1)));
7235 	  new_rtx = make_compound_operation (new_rtx, in_code);
7236 	}
7237 
7238       /* If we have (and (rotate X C) M) and C is at least the number
7239 	 of bits in M, this is an extraction.  */
7240 
7241       else if (GET_CODE (XEXP (x, 0)) == ROTATE
7242 	       && CONST_INT_P (XEXP (XEXP (x, 0), 1))
7243 	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
7244 	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
7245 	{
7246 	  new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
7247 	  new_rtx = make_extraction (mode, new_rtx,
7248 				 (GET_MODE_BITSIZE (mode)
7249 				  - INTVAL (XEXP (XEXP (x, 0), 1))),
7250 				 NULL_RTX, i, 1, 0, in_code == COMPARE);
7251 	}
7252 
7253       /* On machines without logical shifts, if the operand of the AND is
7254 	 a logical shift and our mask turns off all the propagated sign
7255 	 bits, we can replace the logical shift with an arithmetic shift.  */
7256       else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7257 	       && !have_insn_for (LSHIFTRT, mode)
7258 	       && have_insn_for (ASHIFTRT, mode)
7259 	       && CONST_INT_P (XEXP (XEXP (x, 0), 1))
7260 	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7261 	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7262 	       && mode_width <= HOST_BITS_PER_WIDE_INT)
7263 	{
7264 	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7265 
7266 	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
7267 	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
7268 	    SUBST (XEXP (x, 0),
7269 		   gen_rtx_ASHIFTRT (mode,
7270 				     make_compound_operation
7271 				     (XEXP (XEXP (x, 0), 0), next_code),
7272 				     XEXP (XEXP (x, 0), 1)));
7273 	}
7274 
7275       /* If the constant is one less than a power of two, this might be
7276 	 representable by an extraction even if no shift is present.
7277 	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
7278 	 we are in a COMPARE.  */
7279       else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7280 	new_rtx = make_extraction (mode,
7281 			       make_compound_operation (XEXP (x, 0),
7282 							next_code),
7283 			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
7284 
7285       /* If we are in a comparison and this is an AND with a power of two,
7286 	 convert this into the appropriate bit extract.  */
7287       else if (in_code == COMPARE
7288 	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
7289 	new_rtx = make_extraction (mode,
7290 			       make_compound_operation (XEXP (x, 0),
7291 							next_code),
7292 			       i, NULL_RTX, 1, 1, 0, 1);
7293 
7294       break;
7295 
7296     case LSHIFTRT:
7297       /* If the sign bit is known to be zero, replace this with an
7298 	 arithmetic shift.  */
7299       if (have_insn_for (ASHIFTRT, mode)
7300 	  && ! have_insn_for (LSHIFTRT, mode)
7301 	  && mode_width <= HOST_BITS_PER_WIDE_INT
7302 	  && (nonzero_bits (XEXP (x, 0), mode)
	      & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7303 	{
7304 	  new_rtx = gen_rtx_ASHIFTRT (mode,
7305 				  make_compound_operation (XEXP (x, 0),
7306 							   next_code),
7307 				  XEXP (x, 1));
7308 	  break;
7309 	}
7310 
7311       /* ... fall through ...  */
7312 
7313     case ASHIFTRT:
7314       lhs = XEXP (x, 0);
7315       rhs = XEXP (x, 1);
7316 
7317       /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
7318 	 this is a SIGN_EXTRACT.  */
7319       if (CONST_INT_P (rhs)
7320 	  && GET_CODE (lhs) == ASHIFT
7321 	  && CONST_INT_P (XEXP (lhs, 1))
7322 	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
7323 	  && INTVAL (rhs) < mode_width)
7324 	{
7325 	  new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
7326 	  new_rtx = make_extraction (mode, new_rtx,
7327 				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
7328 				 NULL_RTX, mode_width - INTVAL (rhs),
7329 				 code == LSHIFTRT, 0, in_code == COMPARE);
7330 	  break;
7331 	}
7332 
7333       /* See if we have operations between an ASHIFTRT and an ASHIFT.
7334 	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
7335 	 also do this for some cases of SIGN_EXTRACT, but it doesn't
7336 	 seem worth the effort; the case checked for occurs on Alpha.  */
7337 
7338       if (!OBJECT_P (lhs)
7339 	  && ! (GET_CODE (lhs) == SUBREG
7340 		&& (OBJECT_P (SUBREG_REG (lhs))))
7341 	  && CONST_INT_P (rhs)
7342 	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
7343 	  && INTVAL (rhs) < mode_width
7344 	  && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0)
7345 	new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code),
7346 			       0, NULL_RTX, mode_width - INTVAL (rhs),
7347 			       code == LSHIFTRT, 0, in_code == COMPARE);
7348 
7349       break;
7350 
7351     case SUBREG:
7352       /* Call ourselves recursively on the inner expression.  If we are
7353 	 narrowing the object and it has a different RTL code from
7354 	 what it originally did, do this SUBREG as a force_to_mode.  */
7355 
7356       tem = make_compound_operation (SUBREG_REG (x), in_code);
7357 
7358       {
7359 	rtx simplified = simplify_subreg (mode, tem, GET_MODE (SUBREG_REG (x)),
7360 					  SUBREG_BYTE (x));
7361 
7362 	if (simplified)
7363 	  tem = simplified;
7364 
7365 	if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
7366 	    && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
7367 	    && subreg_lowpart_p (x))
7368 	  {
7369 	    rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
7370 				       0);
7371 
7372 	    /* If we have something other than a SUBREG, we might have
7373 	       done an expansion, so rerun ourselves.  */
7374 	    if (GET_CODE (newer) != SUBREG)
7375 	      newer = make_compound_operation (newer, in_code);
7376 
7377 	    /* force_to_mode can expand compounds.  If it just re-expanded the
7378 	       compound, use gen_lowpart instead to convert to the desired
7379 	       mode.  */
7380 	    if (rtx_equal_p (newer, x))
7381 	      return gen_lowpart (GET_MODE (x), tem);
7382 
7383 	    return newer;
7384 	  }
7385 
7386 	if (simplified)
7387 	  return tem;
7388       }
7389       break;
7390 
7391     default:
7392       break;
7393     }
7394 
7395   if (new_rtx)
7396     {
7397       x = gen_lowpart (mode, new_rtx);
7398       code = GET_CODE (x);
7399     }
7400 
7401   /* Now recursively process each operand of this operation.  */
7402   fmt = GET_RTX_FORMAT (code);
7403   for (i = 0; i < GET_RTX_LENGTH (code); i++)
7404     if (fmt[i] == 'e')
7405       {
7406 	new_rtx = make_compound_operation (XEXP (x, i), next_code);
7407 	SUBST (XEXP (x, i), new_rtx);
7408       }
7409     else if (fmt[i] == 'E')
7410       for (j = 0; j < XVECLEN (x, i); j++)
7411 	{
7412 	  new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
7413 	  SUBST (XVECEXP (x, i, j), new_rtx);
7414 	}
7415 
7416   /* If this is a commutative operation, the changes to the operands
7417      may have made it noncanonical.  */
7418   if (COMMUTATIVE_ARITH_P (x)
7419       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
7420     {
7421       tem = XEXP (x, 0);
7422       SUBST (XEXP (x, 0), XEXP (x, 1));
7423       SUBST (XEXP (x, 1), tem);
7424     }
7425 
7426   return x;
7427 }
7428 
7429 /* Given M, see if it is a value that would select a field of bits
7430    within an item, but not the entire word.  Return -1 if not.
7431    Otherwise, return the starting position of the field, where 0 is the
7432    low-order bit.
7433 
7434    *PLEN is set to the length of the field.  */
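/* For example, M == 0x78 (binary 1111000) selects a 4-bit field starting
   at bit 3, so we return 3 and set *PLEN to 4.  M == 0x5 does not select
   a contiguous field, so we return -1.  */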
7435 
7436 static int
7437 get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
7438 {
7439   /* Get the bit number of the first 1 bit from the right, -1 if none.  */
7440   int pos = exact_log2 (m & -m);
7441   int len = 0;
7442 
7443   if (pos >= 0)
7444     /* Now shift off the low-order zero bits and see if we have a
7445        power of two minus 1.  */
7446     len = exact_log2 ((m >> pos) + 1);
7447 
7448   if (len <= 0)
7449     pos = -1;
7450 
7451   *plen = len;
7452   return pos;
7453 }
7454 
7455 /* If X refers to a register that equals REG in value, replace these
7456    references with REG.  */
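/* For example, if get_last_value shows that some (reg 100) inside X has
   the same value as REG, that occurrence is rewritten as REG, so that
   later pattern matching sees identical operands.  */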
7457 static rtx
7458 canon_reg_for_combine (rtx x, rtx reg)
7459 {
7460   rtx op0, op1, op2;
7461   const char *fmt;
7462   int i;
7463   bool copied;
7464 
7465   enum rtx_code code = GET_CODE (x);
7466   switch (GET_RTX_CLASS (code))
7467     {
7468     case RTX_UNARY:
7469       op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7470       if (op0 != XEXP (x, 0))
7471 	return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
7472 				   GET_MODE (reg));
7473       break;
7474 
7475     case RTX_BIN_ARITH:
7476     case RTX_COMM_ARITH:
7477       op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7478       op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7479       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7480 	return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
7481       break;
7482 
7483     case RTX_COMPARE:
7484     case RTX_COMM_COMPARE:
7485       op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7486       op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7487       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7488 	return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
7489 					GET_MODE (op0), op0, op1);
7490       break;
7491 
7492     case RTX_TERNARY:
7493     case RTX_BITFIELD_OPS:
7494       op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7495       op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7496       op2 = canon_reg_for_combine (XEXP (x, 2), reg);
7497       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
7498 	return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
7499 				     GET_MODE (op0), op0, op1, op2);
7500       break;

7501     case RTX_OBJ:
7502       if (REG_P (x))
7503 	{
7504 	  if (rtx_equal_p (get_last_value (reg), x)
7505 	      || rtx_equal_p (reg, get_last_value (x)))
7506 	    return reg;
7507 	  else
7508 	    break;
7509 	}
7510 
7511       /* fall through */
7512 
7513     default:
7514       fmt = GET_RTX_FORMAT (code);
7515       copied = false;
7516       for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7517 	if (fmt[i] == 'e')
7518 	  {
7519 	    rtx op = canon_reg_for_combine (XEXP (x, i), reg);
7520 	    if (op != XEXP (x, i))
7521 	      {
7522 		if (!copied)
7523 		  {
7524 		    copied = true;
7525 		    x = copy_rtx (x);
7526 		  }
7527 		XEXP (x, i) = op;
7528 	      }
7529 	  }
7530 	else if (fmt[i] == 'E')
7531 	  {
7532 	    int j;
7533 	    for (j = 0; j < XVECLEN (x, i); j++)
7534 	      {
7535 		rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
7536 		if (op != XVECEXP (x, i, j))
7537 		  {
7538 		    if (!copied)
7539 		      {
7540 			copied = true;
7541 			x = copy_rtx (x);
7542 		      }
7543 		    XVECEXP (x, i, j) = op;
7544 		  }
7545 	      }
7546 	  }
7547 
7548       break;
7549     }
7550 
7551   return x;
7552 }
7553 
7554 /* Return X converted to MODE.  If the value is already truncated to
7555    MODE we can just return a subreg even though in the general case we
7556    would need an explicit truncation.  */
7557 
7558 static rtx
7559 gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
7560 {
7561   if (!CONST_INT_P (x)
7562       && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
7563       && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
7564 				 GET_MODE_BITSIZE (GET_MODE (x)))
7565       && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
7566     {
7567       /* Bit-cast X into an integer mode.  */
7568       if (!SCALAR_INT_MODE_P (GET_MODE (x)))
7569 	x = gen_lowpart (int_mode_for_mode (GET_MODE (x)), x);
7570       x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode),
7571 			      x, GET_MODE (x));
7572     }
7573 
7574   return gen_lowpart (mode, x);
7575 }
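/* Editorial note (not in the original source): on targets where
   TRULY_NOOP_TRUNCATION holds for the mode pair, the routine above
   reduces to a plain lowpart SUBREG; on targets where it does not
   (some 64-bit RISC targets keep narrow values sign-extended in wide
   registers), an explicit TRUNCATE is generated first.  */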
7576 
7577 /* See if X can be simplified knowing that we will only refer to it in
7578    MODE and will only refer to those bits that are nonzero in MASK.
7579    If other bits are being computed or if masking operations are done
7580    that select a superset of the bits in MASK, they can sometimes be
7581    ignored.
7582 
7583    Return a possibly simplified expression, but always convert X to
7584    MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
7585 
7586    If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
7587    are all off in X.  This is used when X will be complemented, by either
7588    NOT, NEG, or XOR.  */
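/* Editorial example (not in the original source): a call such as
   force_to_mode ((and X (const_int 0xff)), QImode, 0x0f, 0) first
   tightens the AND constant to 0x0f and can then drop the AND
   entirely, since the caller promises never to look at bits outside
   MASK; only the low part of X is returned.  */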
7589 
7590 static rtx
7591 force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
7592 	       int just_select)
7593 {
7594   enum rtx_code code = GET_CODE (x);
7595   int next_select = just_select || code == XOR || code == NOT || code == NEG;
7596   enum machine_mode op_mode;
7597   unsigned HOST_WIDE_INT fuller_mask, nonzero;
7598   rtx op0, op1, temp;
7599 
7600   /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
7601      code below will do the wrong thing since the mode of such an
7602      expression is VOIDmode.
7603 
7604      Also do nothing if X is a CLOBBER; this can happen if X was
7605      the return value from a call to gen_lowpart.  */
7606   if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
7607     return x;
7608 
7609   /* We want to perform the operation in its present mode unless we know
7610      that the operation is valid in MODE, in which case we do the operation
7611      in MODE.  */
7612   op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
7613 	      && have_insn_for (code, mode))
7614 	     ? mode : GET_MODE (x));
7615 
7616   /* It is not valid to do a right-shift in a narrower mode
7617      than the one it came in with.  */
7618   if ((code == LSHIFTRT || code == ASHIFTRT)
7619       && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
7620     op_mode = GET_MODE (x);
7621 
7622   /* Truncate MASK to fit OP_MODE.  */
7623   if (op_mode)
7624     mask &= GET_MODE_MASK (op_mode);
7625 
7626   /* When we have an arithmetic operation, or a shift whose count we
7627      do not know, we need to assume that all bits up to the highest-order
7628      bit in MASK will be needed.  This is how we form such a mask.  */
7629   if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
7630     fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
7631   else
7632     fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
7633 		   - 1);
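  /* Editorial example (not in the original source): for MASK 0x14
     (bits 2 and 4), floor_log2 gives 4, so FULLER_MASK is
     (1 << 5) - 1 = 0x1f: every bit position a carry into bit 4 could
     originate from.  */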
7634 
7635   /* Determine what bits of X are guaranteed to be (non)zero.  */
7636   nonzero = nonzero_bits (x, mode);
7637 
7638   /* If none of the bits in X are needed, return a zero.  */
7639   if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
7640     x = const0_rtx;
7641 
7642   /* If X is a CONST_INT, return a new one.  Do this here since the
7643      mode-based tests below do not work for a modeless constant.  */
7644   if (CONST_INT_P (x))
7645     {
7646       if (SCALAR_INT_MODE_P (mode))
7647 	return gen_int_mode (INTVAL (x) & mask, mode);
7648       else
7649 	{
7650 	  x = GEN_INT (INTVAL (x) & mask);
7651 	  return gen_lowpart_common (mode, x);
7652 	}
7653     }
7654 
7655   /* If X is narrower than MODE and we want all the bits in X's mode, just
7656      get X in the proper mode.  */
7657   if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
7658       && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
7659     return gen_lowpart (mode, x);
7660 
7661   /* We can ignore the effect of a SUBREG if it narrows the mode or
7662      if the constant masks to zero all the bits the mode doesn't have.  */
7663   if (GET_CODE (x) == SUBREG
7664       && subreg_lowpart_p (x)
7665       && ((GET_MODE_SIZE (GET_MODE (x))
7666 	   < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7667 	  || (0 == (mask
7668 		    & GET_MODE_MASK (GET_MODE (x))
7669 		    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
7670     return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
7671 
7672   /* The arithmetic simplifications here only work for scalar integer modes.  */
7673   if (!SCALAR_INT_MODE_P (mode) || !SCALAR_INT_MODE_P (GET_MODE (x)))
7674     return gen_lowpart_or_truncate (mode, x);
7675 
7676   switch (code)
7677     {
7678     case CLOBBER:
7679       /* If X is a (clobber (const_int)), return it since we know we are
7680 	 generating something that won't match.  */
7681       return x;
7682 
7683     case SIGN_EXTEND:
7684     case ZERO_EXTEND:
7685     case ZERO_EXTRACT:
7686     case SIGN_EXTRACT:
7687       x = expand_compound_operation (x);
7688       if (GET_CODE (x) != code)
7689 	return force_to_mode (x, mode, mask, next_select);
7690       break;
7691 
7692     case TRUNCATE:
7693       /* Similarly for a truncate.  */
7694       return force_to_mode (XEXP (x, 0), mode, mask, next_select);
7695 
7696     case AND:
7697       /* If this is an AND with a constant, convert it into an AND
7698 	 whose constant is the AND of that constant with MASK.  If it
7699 	 remains an AND of MASK, delete it since it is redundant.  */
7700 
7701       if (CONST_INT_P (XEXP (x, 1)))
7702 	{
7703 	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
7704 				      mask & INTVAL (XEXP (x, 1)));
7705 
7706 	  /* If X is still an AND, see if it is an AND with a mask that
7707 	     is just some low-order bits.  If so, and it is MASK, we don't
7708 	     need it.  */
7709 
7710 	  if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
7711 	      && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
7712 		  == mask))
7713 	    x = XEXP (x, 0);
7714 
7715 	  /* If it remains an AND, try making another AND with the bits
7716 	     in the mode mask that aren't in MASK turned on.  If the
7717 	     constant in the AND is wide enough, this might make a
7718 	     cheaper constant.  */
7719 
7720 	  if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
7721 	      && GET_MODE_MASK (GET_MODE (x)) != mask
7722 	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
7723 	    {
7724 	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
7725 				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
7726 	      int width = GET_MODE_BITSIZE (GET_MODE (x));
7727 	      rtx y;
7728 
7729 	      /* If the mode of X is narrower than HOST_WIDE_INT and CVAL
7730 		 is a negative number, sign extend it.  */
7731 	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
7732 		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7733 		cval |= (HOST_WIDE_INT) -1 << width;
7734 
7735 	      y = simplify_gen_binary (AND, GET_MODE (x),
7736 				       XEXP (x, 0), GEN_INT (cval));
7737 	      if (rtx_cost (y, SET, optimize_this_for_speed_p)
7738 	          < rtx_cost (x, SET, optimize_this_for_speed_p))
7739 		x = y;
7740 	    }
7741 
7742 	  break;
7743 	}
7744 
7745       goto binop;
7746 
7747     case PLUS:
7748       /* In (and (plus FOO C1) M), if M is a mask that just turns off
7749 	 low-order bits (as in an alignment operation) and FOO is already
7750 	 aligned to that boundary, mask C1 to that boundary as well.
7751 	 This may eliminate that PLUS and, later, the AND.  */
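      /* Editorial example (not in the original source): with MASK
	 0xfffffff0 and FOO known to be 16-byte aligned,
	 (and (plus FOO 18) 0xfffffff0) is rewritten using
	 (plus FOO 16): the low bits of the constant cannot survive
	 the masking, and the aligned FOO generates no carries into
	 the bits that do.  */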
7752 
7753       {
7754 	unsigned int width = GET_MODE_BITSIZE (mode);
7755 	unsigned HOST_WIDE_INT smask = mask;
7756 
7757 	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7758 	   number, sign extend it.  */
7759 
7760 	if (width < HOST_BITS_PER_WIDE_INT
7761 	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7762 	  smask |= (HOST_WIDE_INT) -1 << width;
7763 
7764 	if (CONST_INT_P (XEXP (x, 1))
7765 	    && exact_log2 (- smask) >= 0
7766 	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7767 	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7768 	  return force_to_mode (plus_constant (XEXP (x, 0),
7769 					       (INTVAL (XEXP (x, 1)) & smask)),
7770 				mode, smask, next_select);
7771       }
7772 
7773       /* ... fall through ...  */
7774 
7775     case MULT:
7776       /* For PLUS, MINUS and MULT, we need all bits less significant than
7777 	 the most significant bit in MASK, since carries from those bits
7778 	 will affect the bits we are interested in.  */
7779       mask = fuller_mask;
7780       goto binop;
7781 
7782     case MINUS:
7783       /* If X is (minus C Y) where C's least set bit is larger than any bit
7784 	 in the mask, then we may replace with (neg Y).  */
7785       if (CONST_INT_P (XEXP (x, 0))
7786 	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7787 					& -INTVAL (XEXP (x, 0))))
7788 	      > mask))
7789 	{
7790 	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7791 				  GET_MODE (x));
7792 	  return force_to_mode (x, mode, mask, next_select);
7793 	}
7794 
7795       /* Similarly, if C contains every bit in the fuller_mask, then we may
7796 	 replace with (not Y).  */
7797       if (CONST_INT_P (XEXP (x, 0))
7798 	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
7799 	      == INTVAL (XEXP (x, 0))))
7800 	{
7801 	  x = simplify_gen_unary (NOT, GET_MODE (x),
7802 				  XEXP (x, 1), GET_MODE (x));
7803 	  return force_to_mode (x, mode, mask, next_select);
7804 	}
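      /* Editorial examples (not in the original source): with MASK
	 0xff, (minus (const_int 0x100) Y) becomes (neg Y), since
	 0x100 contributes nothing modulo 0x100; and
	 (minus (const_int 0xff) Y) becomes (not Y), since
	 0xff - Y = ~Y in the eight bits selected.  */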
7805 
7806       mask = fuller_mask;
7807       goto binop;
7808 
7809     case IOR:
7810     case XOR:
7811       /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7812 	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7813 	 operation which may be a bitfield extraction.  Ensure that the
7814 	 constant we form is not wider than the mode of X.  */
7815 
7816       if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7817 	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
7818 	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7819 	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7820 	  && CONST_INT_P (XEXP (x, 1))
7821 	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
7822 	       + floor_log2 (INTVAL (XEXP (x, 1))))
7823 	      < GET_MODE_BITSIZE (GET_MODE (x)))
7824 	  && (INTVAL (XEXP (x, 1))
7825 	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
7826 	{
7827 	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
7828 			  << INTVAL (XEXP (XEXP (x, 0), 1)));
7829 	  temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
7830 				      XEXP (XEXP (x, 0), 0), temp);
7831 	  x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
7832 				   XEXP (XEXP (x, 0), 1));
7833 	  return force_to_mode (x, mode, mask, next_select);
7834 	}
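      /* Editorial example (not in the original source): with MASK
	 0xff, (ior (lshiftrt FOO 8) (const_int 3)) may become
	 (lshiftrt (ior FOO 0x300) 8), a shape the extraction code
	 can recognize as a bitfield operation.  */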
7835 
7836     binop:
7837       /* For most binary operations, just propagate into the operation and
7838 	 change the mode if we have an operation of that mode.  */
7839 
7840       op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
7841       op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);
7842 
7843       /* If we ended up truncating both operands, truncate the result of the
7844 	 operation instead.  */
7845       if (GET_CODE (op0) == TRUNCATE
7846 	  && GET_CODE (op1) == TRUNCATE)
7847 	{
7848 	  op0 = XEXP (op0, 0);
7849 	  op1 = XEXP (op1, 0);
7850 	}
7851 
7852       op0 = gen_lowpart_or_truncate (op_mode, op0);
7853       op1 = gen_lowpart_or_truncate (op_mode, op1);
7854 
7855       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7856 	x = simplify_gen_binary (code, op_mode, op0, op1);
7857       break;
7858 
7859     case ASHIFT:
7860       /* For left shifts, do the same, but just for the first operand.
7861 	 However, we cannot do anything with shifts where we cannot
7862 	 guarantee that the counts are smaller than the size of the mode
7863 	 because such a count will have a different meaning in a
7864 	 wider mode.  */
7865 
7866       if (! (CONST_INT_P (XEXP (x, 1))
7867 	     && INTVAL (XEXP (x, 1)) >= 0
7868 	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7869 	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7870 		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7871 		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7872 	break;
7873 
7874       /* If the shift count is a constant and we can do arithmetic in
7875 	 the mode of the shift, refine which bits we need.  Otherwise, use the
7876 	 conservative form of the mask.  */
7877       if (CONST_INT_P (XEXP (x, 1))
7878 	  && INTVAL (XEXP (x, 1)) >= 0
7879 	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7880 	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7881 	mask >>= INTVAL (XEXP (x, 1));
7882       else
7883 	mask = fuller_mask;
7884 
7885       op0 = gen_lowpart_or_truncate (op_mode,
7886 				     force_to_mode (XEXP (x, 0), op_mode,
7887 						    mask, next_select));
7888 
7889       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7890 	x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
7891       break;
7892 
7893     case LSHIFTRT:
7894       /* Here we can only do something if the shift count is a constant,
7895 	 this shift constant is valid for the host, and we can do arithmetic
7896 	 in OP_MODE.  */
7897 
7898       if (CONST_INT_P (XEXP (x, 1))
7899 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7900 	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7901 	{
7902 	  rtx inner = XEXP (x, 0);
7903 	  unsigned HOST_WIDE_INT inner_mask;
7904 
7905 	  /* Select the mask of the bits we need for the shift operand.  */
7906 	  inner_mask = mask << INTVAL (XEXP (x, 1));
7907 
7908 	  /* We can only change the mode of the shift if we can do arithmetic
7909 	     in the mode of the shift and INNER_MASK is no wider than the
7910 	     width of X's mode.  */
7911 	  if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
7912 	    op_mode = GET_MODE (x);
7913 
7914 	  inner = force_to_mode (inner, op_mode, inner_mask, next_select);
7915 
7916 	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7917 	    x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7918 	}
7919 
7920       /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7921 	 shift and AND produces only copies of the sign bit (C2 is one less
7922 	 than a power of two), we can do this with just a shift.  */
7923 
7924       if (GET_CODE (x) == LSHIFTRT
7925 	  && CONST_INT_P (XEXP (x, 1))
7926 	  /* The shift puts one of the sign bit copies in the least significant
7927 	     bit.  */
7928 	  && ((INTVAL (XEXP (x, 1))
7929 	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7930 	      >= GET_MODE_BITSIZE (GET_MODE (x)))
7931 	  && exact_log2 (mask + 1) >= 0
7932 	  /* Number of bits left after the shift must be more than the mask
7933 	     needs.  */
7934 	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7935 	      <= GET_MODE_BITSIZE (GET_MODE (x)))
7936 	  /* Must be more sign bit copies than the mask needs.  */
7937 	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7938 	      >= exact_log2 (mask + 1)))
7939 	x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7940 				 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7941 					  - exact_log2 (mask + 1)));
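      /* Editorial example (not in the original source): in SImode,
	 if FOO has at least 8 sign bit copies and MASK is 0x7f
	 (so exact_log2 (MASK + 1) = 7), (and (lshiftrt FOO 24) 0x7f)
	 needs no AND: (lshiftrt FOO 25) already leaves only the
	 seven requested copies of the sign bit.  */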
7942 
7943       goto shiftrt;
7944 
7945     case ASHIFTRT:
7946       /* If we are just looking for the sign bit, we don't need this shift at
7947 	 all, even if it has a variable count.  */
7948       if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7949 	  && (mask == ((unsigned HOST_WIDE_INT) 1
7950 		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7951 	return force_to_mode (XEXP (x, 0), mode, mask, next_select);
7952 
7953       /* If this is a shift by a constant, get a mask that contains those bits
7954 	 that are not copies of the sign bit.  We then have two cases:  If
7955 	 MASK only includes those bits, this can be a logical shift, which may
7956 	 allow simplifications.  If MASK is a single-bit field not within
7957 	 those bits, we are requesting a copy of the sign bit and hence can
7958 	 shift the sign bit to the appropriate location.  */
7959 
7960       if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
7961 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7962 	{
7963 	  int i;
7964 
7965 	  /* If the considered data is wider than HOST_WIDE_INT, we can't
7966 	     represent a mask for all its bits in a single scalar.
7967 	     But we only care about the lower bits, so calculate these.  */
7968 
7969 	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7970 	    {
7971 	      nonzero = ~(HOST_WIDE_INT) 0;
7972 
7973 	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7974 		 is the number of bits a full-width mask would have set.
7975 		 We need only shift if these are fewer than nonzero can
7976 		 hold.  If not, we must keep all bits set in nonzero.  */
7977 
7978 	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7979 		  < HOST_BITS_PER_WIDE_INT)
7980 		nonzero >>= INTVAL (XEXP (x, 1))
7981 			    + HOST_BITS_PER_WIDE_INT
7982 			    - GET_MODE_BITSIZE (GET_MODE (x));
7983 	    }
7984 	  else
7985 	    {
7986 	      nonzero = GET_MODE_MASK (GET_MODE (x));
7987 	      nonzero >>= INTVAL (XEXP (x, 1));
7988 	    }
7989 
7990 	  if ((mask & ~nonzero) == 0)
7991 	    {
7992 	      x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
7993 					XEXP (x, 0), INTVAL (XEXP (x, 1)));
7994 	      if (GET_CODE (x) != ASHIFTRT)
7995 		return force_to_mode (x, mode, mask, next_select);
7996 	    }
7997 
7998 	  else if ((i = exact_log2 (mask)) >= 0)
7999 	    {
8000 	      x = simplify_shift_const
8001 		  (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
8002 		   GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
8003 
8004 	      if (GET_CODE (x) != ASHIFTRT)
8005 		return force_to_mode (x, mode, mask, next_select);
8006 	    }
8007 	}
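      /* Editorial example (not in the original source): for
	 (ashiftrt X 8) in SImode, NONZERO is 0x00ffffff.  With MASK
	 0xff the arithmetic shift can become the cheaper
	 (lshiftrt X 8); with MASK (1 << 27), a single bit inside the
	 sign-copy region, the sign bit is shifted directly into
	 place by (lshiftrt X (32 - 1 - 27)).  */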
8008 
8009       /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
8010 	 even if the shift count isn't a constant.  */
8011       if (mask == 1)
8012 	x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
8013 				 XEXP (x, 0), XEXP (x, 1));
8014 
8015     shiftrt:
8016 
8017       /* If this is a zero- or sign-extension operation that just affects bits
8018 	 we don't care about, remove it.  Be sure the call above returned
8019 	 something that is still a shift.  */
8020 
8021       if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
8022 	  && CONST_INT_P (XEXP (x, 1))
8023 	  && INTVAL (XEXP (x, 1)) >= 0
8024 	  && (INTVAL (XEXP (x, 1))
8025 	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
8026 	  && GET_CODE (XEXP (x, 0)) == ASHIFT
8027 	  && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
8028 	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
8029 			      next_select);
8030 
8031       break;
8032 
8033     case ROTATE:
8034     case ROTATERT:
8035       /* If the shift count is constant and we can do computations
8036 	 in the mode of X, compute where the bits we care about are.
8037 	 Otherwise, we can't do anything.  Don't change the mode of
8038 	 the shift or propagate MODE into the shift, though.  */
8039       if (CONST_INT_P (XEXP (x, 1))
8040 	  && INTVAL (XEXP (x, 1)) >= 0)
8041 	{
8042 	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
8043 					    GET_MODE (x), GEN_INT (mask),
8044 					    XEXP (x, 1));
8045 	  if (temp && CONST_INT_P (temp))
8046 	    SUBST (XEXP (x, 0),
8047 		   force_to_mode (XEXP (x, 0), GET_MODE (x),
8048 				  INTVAL (temp), next_select));
8049 	}
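      /* Editorial example (not in the original source): for
	 (rotate FOO 8) in SImode with MASK 0xff00, rotating the mask
	 back by 8 gives 0xff, so only FOO's low byte needs to be
	 preserved going into the rotate.  */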
8050       break;
8051 
8052     case NEG:
8053       /* If we just want the low-order bit, the NEG isn't needed since it
8054 	 won't change the low-order bit.  */
8055       if (mask == 1)
8056 	return force_to_mode (XEXP (x, 0), mode, mask, just_select);
8057 
8058       /* We need all bits less significant than the most significant bit in
8059 	 MASK, since carries from those bits will affect the bits we are
8060 	 interested in.  */
8061       mask = fuller_mask;
8062       goto unop;
8063 
8064     case NOT:
8065       /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
8066 	 same as the XOR case above.  Ensure that the constant we form is not
8067 	 wider than the mode of X.  */
8068 
8069       if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8070 	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8071 	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8072 	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
8073 	      < GET_MODE_BITSIZE (GET_MODE (x)))
8074 	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8075 	{
8076 	  temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
8077 			       GET_MODE (x));
8078 	  temp = simplify_gen_binary (XOR, GET_MODE (x),
8079 				      XEXP (XEXP (x, 0), 0), temp);
8080 	  x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
8081 				   temp, XEXP (XEXP (x, 0), 1));
8082 
8083 	  return force_to_mode (x, mode, mask, next_select);
8084 	}
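      /* Editorial example (not in the original source): with MASK
	 0xff, (not (lshiftrt FOO 8)) becomes
	 (lshiftrt (xor FOO 0xff00) 8); XORing before the shift
	 complements exactly the eight bits that will be kept.  */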
8085 
8086       /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
8087 	 use the full mask inside the NOT.  */
8088       mask = fuller_mask;
8089 
8090     unop:
8091       op0 = gen_lowpart_or_truncate (op_mode,
8092 				     force_to_mode (XEXP (x, 0), mode, mask,
8093 						    next_select));
8094       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
8095 	x = simplify_gen_unary (code, op_mode, op0, op_mode);
8096       break;
8097 
8098     case NE:
8099       /* (and (ne FOO 0) CONST) can be (and FOO CONST) if every bit of CONST
8100 	 lies within STORE_FLAG_VALUE and the single bit of FOO that might
8101 	 be nonzero is equal to STORE_FLAG_VALUE.  */
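      /* Editorial example (not in the original source): with
	 STORE_FLAG_VALUE 1, MASK 1, and FOO known to be 0 or 1,
	 (and (ne FOO 0) 1) is just FOO, so the NE test is
	 dropped.  */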
8102       if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
8103 	  && GET_MODE (XEXP (x, 0)) == mode
8104 	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
8105 	  && (nonzero_bits (XEXP (x, 0), mode)
8106 	      == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
8107 	return force_to_mode (XEXP (x, 0), mode, mask, next_select);
8108 
8109       break;
8110 
8111     case IF_THEN_ELSE:
8112       /* We have no way of knowing if the IF_THEN_ELSE can itself be
8113 	 written in a narrower mode.  We play it safe and do not do so.  */
8114 
8115       SUBST (XEXP (x, 1),
8116 	     gen_lowpart_or_truncate (GET_MODE (x),
8117 				      force_to_mode (XEXP (x, 1), mode,
8118 						     mask, next_select)));
8119       SUBST (XEXP (x, 2),
8120 	     gen_lowpart_or_truncate (GET_MODE (x),
8121 				      force_to_mode (XEXP (x, 2), mode,
8122 						     mask, next_select)));
8123       break;
8124 
8125     default:
8126       break;
8127     }
8128 
8129   /* Ensure we return a value of the proper mode.  */
8130   return gen_lowpart_or_truncate (mode, x);
8131 }
8132 
8133 /* Return nonzero if X is an expression that has one of two values depending on
8134    whether some other value is zero or nonzero.  In that case, we return the
8135    value that is being tested, *PTRUE is set to the value if the rtx being
8136    returned has a nonzero value, and *PFALSE is set to the other alternative.
8137 
8138    If we return zero, we set *PTRUE and *PFALSE to X.  */
8139 
8140 static rtx
8141 if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
8142 {
8143   enum machine_mode mode = GET_MODE (x);
8144   enum rtx_code code = GET_CODE (x);
8145   rtx cond0, cond1, true0, true1, false0, false1;
8146   unsigned HOST_WIDE_INT nz;
8147 
8148   /* If we are comparing a value against zero, we are done.  */
8149   if ((code == NE || code == EQ)
8150       && XEXP (x, 1) == const0_rtx)
8151     {
8152       *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
8153       *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
8154       return XEXP (x, 0);
8155     }
8156 
8157   /* If this is a unary operation whose operand has one of two values, apply
8158      our opcode to compute those values.  */
8159   else if (UNARY_P (x)
8160 	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
8161     {
8162       *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
8163       *pfalse = simplify_gen_unary (code, mode, false0,
8164 				    GET_MODE (XEXP (x, 0)));
8165       return cond0;
8166     }
8167 
8168   /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
8169      make can't possibly match and would suppress other optimizations.  */
8170   else if (code == COMPARE)
8171     ;
8172 
8173   /* If this is a binary operation, see if either side has only one of two
8174      values.  If either one does or if both do and they are conditional on
8175      the same value, compute the new true and false values.  */
8176   else if (BINARY_P (x))
8177     {
8178       cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
8179       cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
8180 
8181       if ((cond0 != 0 || cond1 != 0)
8182 	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
8183 	{
8184 	  /* If if_then_else_cond returned zero, then true/false are the
8185 	     same rtl.  We must copy one of them to prevent invalid rtl
8186 	     sharing.  */
8187 	  if (cond0 == 0)
8188 	    true0 = copy_rtx (true0);
8189 	  else if (cond1 == 0)
8190 	    true1 = copy_rtx (true1);
8191 
8192 	  if (COMPARISON_P (x))
8193 	    {
8194 	      *ptrue = simplify_gen_relational (code, mode, VOIDmode,
8195 						true0, true1);
8196 	      *pfalse = simplify_gen_relational (code, mode, VOIDmode,
8197 						 false0, false1);
8198 	    }
8199 	  else
8200 	    {
8201 	      *ptrue = simplify_gen_binary (code, mode, true0, true1);
8202 	      *pfalse = simplify_gen_binary (code, mode, false0, false1);
8203 	    }
8204 
8205 	  return cond0 ? cond0 : cond1;
8206 	}
8207 
8208       /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
8209 	 operands is zero when the other is nonzero, and vice-versa,
8210 	 and STORE_FLAG_VALUE is 1 or -1.  */
8211 
8212       if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8213 	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
8214 	      || code == UMAX)
8215 	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
8216 	{
8217 	  rtx op0 = XEXP (XEXP (x, 0), 1);
8218 	  rtx op1 = XEXP (XEXP (x, 1), 1);
8219 
8220 	  cond0 = XEXP (XEXP (x, 0), 0);
8221 	  cond1 = XEXP (XEXP (x, 1), 0);
8222 
8223 	  if (COMPARISON_P (cond0)
8224 	      && COMPARISON_P (cond1)
8225 	      && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
8226 		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
8227 		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
8228 		  || ((swap_condition (GET_CODE (cond0))
8229 		       == reversed_comparison_code (cond1, NULL))
8230 		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
8231 		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
8232 	      && ! side_effects_p (x))
8233 	    {
8234 	      *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
8235 	      *pfalse = simplify_gen_binary (MULT, mode,
8236 					     (code == MINUS
8237 					      ? simplify_gen_unary (NEG, mode,
8238 								    op1, mode)
8239 					      : op1),
8240 					      const_true_rtx);
8241 	      return cond0;
8242 	    }
8243 	}
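      /* Editorial example (not in the original source): with
	 STORE_FLAG_VALUE 1, (plus (mult (ne A 0) X) (mult (eq A 0) Y))
	 selects X when A is nonzero and Y otherwise, so we return
	 (ne A 0) with *PTRUE = X and *PFALSE = Y.  */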
8244 
8245       /* Similarly for MULT, AND and UMIN, except that for these the result
8246 	 is always zero.  */
8247       if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8248 	  && (code == MULT || code == AND || code == UMIN)
8249 	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
8250 	{
8251 	  cond0 = XEXP (XEXP (x, 0), 0);
8252 	  cond1 = XEXP (XEXP (x, 1), 0);
8253 
8254 	  if (COMPARISON_P (cond0)
8255 	      && COMPARISON_P (cond1)
8256 	      && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
8257 		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
8258 		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
8259 		  || ((swap_condition (GET_CODE (cond0))
8260 		       == reversed_comparison_code (cond1, NULL))
8261 		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
8262 		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
8263 	      && ! side_effects_p (x))
8264 	    {
8265 	      *ptrue = *pfalse = const0_rtx;
8266 	      return cond0;
8267 	    }
8268 	}
8269     }
8270 
8271   else if (code == IF_THEN_ELSE)
8272     {
8273       /* If we have IF_THEN_ELSE already, extract the condition and
8274 	 canonicalize it if it is NE or EQ.  */
8275       cond0 = XEXP (x, 0);
8276       *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
8277       if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
8278 	return XEXP (cond0, 0);
8279       else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
8280 	{
8281 	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
8282 	  return XEXP (cond0, 0);
8283 	}
8284       else
8285 	return cond0;
8286     }
8287 
8288   /* If X is a SUBREG, we can narrow both the true and false values
8289      of the inner expression, if there is a condition.  */
8290   else if (code == SUBREG
8291 	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
8292 					       &true0, &false0)))
8293     {
8294       true0 = simplify_gen_subreg (mode, true0,
8295 				   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
8296       false0 = simplify_gen_subreg (mode, false0,
8297 				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
8298       if (true0 && false0)
8299 	{
8300 	  *ptrue = true0;
8301 	  *pfalse = false0;
8302 	  return cond0;
8303 	}
8304     }
8305 
8306   /* If X is a constant, this isn't special and will cause confusion
8307      if we treat it as such.  Likewise if it is equivalent to a constant.  */
8308   else if (CONSTANT_P (x)
8309 	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
8310     ;
8311 
8312   /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
8313      will be least confusing to the rest of the compiler.  */
8314   else if (mode == BImode)
8315     {
8316       *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
8317       return x;
8318     }
8319 
8320   /* If X is known to be either 0 or -1, those are the true and
8321      false values when testing X.  */
8322   else if (x == constm1_rtx || x == const0_rtx
8323 	   || (mode != VOIDmode
8324 	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
8325     {
8326       *ptrue = constm1_rtx, *pfalse = const0_rtx;
8327       return x;
8328     }
8329 
8330   /* Likewise for 0 or a single bit.  */
8331   else if (SCALAR_INT_MODE_P (mode)
8332 	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8333 	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
8334     {
8335       *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
8336       return x;
8337     }
8338 
8339   /* Otherwise fail; show no condition with true and false values the same.  */
8340   *ptrue = *pfalse = x;
8341   return 0;
8342 }
8343 
8344 /* Return the value of expression X given the fact that condition COND
8345    is known to be true when applied to REG as its first operand and VAL
8346    as its second.  X is known to not be shared and so can be modified in
8347    place.
8348 
8349    We only handle the simplest cases, and specifically those cases that
8350    arise with IF_THEN_ELSE expressions.  */
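/* Editorial example (not in the original source):
   known_cond ((smin REG VAL), LE, REG, VAL) returns REG, since
   REG <= VAL implies the minimum is REG.  */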
8351 
8352 static rtx
8353 known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
8354 {
8355   enum rtx_code code = GET_CODE (x);
8356   rtx temp;
8357   const char *fmt;
8358   int i, j;
8359 
8360   if (side_effects_p (x))
8361     return x;
8362 
8363   /* If either operand of the condition is a floating point value, then
8364      we must avoid collapsing an EQ comparison (-0.0 == 0.0, for one).  */
8365   if (cond == EQ
8366       && rtx_equal_p (x, reg)
8367       && ! FLOAT_MODE_P (GET_MODE (x))
8368       && ! FLOAT_MODE_P (GET_MODE (val)))
8369     return val;
8370 
8371   if (cond == UNEQ && rtx_equal_p (x, reg))
8372     return val;
8373 
8374   /* If X is (abs REG) and we know something about REG's relationship
8375      with zero, we may be able to simplify this.  */
8376 
8377   if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
8378     switch (cond)
8379       {
8380       case GE:  case GT:  case EQ:
8381 	return XEXP (x, 0);
8382       case LT:  case LE:
8383 	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
8384 				   XEXP (x, 0),
8385 				   GET_MODE (XEXP (x, 0)));
8386       default:
8387 	break;
8388       }
8389 
8390   /* The only other cases we handle are MIN, MAX, and comparisons if the
8391      operands are the same as REG and VAL.  */
8392 
8393   else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
8394     {
8395       if (rtx_equal_p (XEXP (x, 0), val))
8396 	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
8397 
8398       if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
8399 	{
8400 	  if (COMPARISON_P (x))
8401 	    {
8402 	      if (comparison_dominates_p (cond, code))
8403 		return const_true_rtx;
8404 
8405 	      code = reversed_comparison_code (x, NULL);
8406 	      if (code != UNKNOWN
8407 		  && comparison_dominates_p (cond, code))
8408 		return const0_rtx;
8409 	      else
8410 		return x;
8411 	    }
8412 	  else if (code == SMAX || code == SMIN
8413 		   || code == UMIN || code == UMAX)
8414 	    {
8415 	      int unsignedp = (code == UMIN || code == UMAX);
8416 
8417 	      /* Do not reverse the condition when it is NE or EQ.
8418 		 This is because we cannot conclude anything about
8419 		 the value of 'SMAX (x, y)' when x is not equal to y,
8420 		 but we can when x equals y.  */
8421 	      if ((code == SMAX || code == UMAX)
8422 		  && ! (cond == EQ || cond == NE))
8423 		cond = reverse_condition (cond);
8424 
8425 	      switch (cond)
8426 		{
8427 		case GE:   case GT:
8428 		  return unsignedp ? x : XEXP (x, 1);
8429 		case LE:   case LT:
8430 		  return unsignedp ? x : XEXP (x, 0);
8431 		case GEU:  case GTU:
8432 		  return unsignedp ? XEXP (x, 1) : x;
8433 		case LEU:  case LTU:
8434 		  return unsignedp ? XEXP (x, 0) : x;
8435 		default:
8436 		  break;
8437 		}
8438 	    }
8439 	}
8440     }
8441   else if (code == SUBREG)
8442     {
8443       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
8444       rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
8445 
8446       if (SUBREG_REG (x) != r)
8447 	{
8448 	  /* We must simplify subreg here, before we lose track of the
8449 	     original inner_mode.  */
8450 	  new_rtx = simplify_subreg (GET_MODE (x), r,
8451 				 inner_mode, SUBREG_BYTE (x));
8452 	  if (new_rtx)
8453 	    return new_rtx;
8454 	  else
8455 	    SUBST (SUBREG_REG (x), r);
8456 	}
8457 
8458       return x;
8459     }
8460   /* We don't have to handle SIGN_EXTEND here, because even in the
8461      case of replacing something with a modeless CONST_INT, a
8462      CONST_INT is already (supposed to be) a valid sign extension for
8463      its narrower mode, which implies it's already properly
8464      sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
8465      story is different.  */
8466   else if (code == ZERO_EXTEND)
8467     {
8468       enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
8469       rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
8470 
8471       if (XEXP (x, 0) != r)
8472 	{
8473 	  /* We must simplify the zero_extend here, before we lose
8474 	     track of the original inner_mode.  */
8475 	  new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
8476 					  r, inner_mode);
8477 	  if (new_rtx)
8478 	    return new_rtx;
8479 	  else
8480 	    SUBST (XEXP (x, 0), r);
8481 	}
8482 
8483       return x;
8484     }
8485 
8486   fmt = GET_RTX_FORMAT (code);
8487   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8488     {
8489       if (fmt[i] == 'e')
8490 	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
8491       else if (fmt[i] == 'E')
8492 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8493 	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
8494 						cond, reg, val));
8495     }
8496 
8497   return x;
8498 }
8499 
8500 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
8501    assignment as a field assignment.  */
8502 
8503 static int
8504 rtx_equal_for_field_assignment_p (rtx x, rtx y)
8505 {
8506   if (x == y || rtx_equal_p (x, y))
8507     return 1;
8508 
8509   if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
8510     return 0;
8511 
8512   /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
8513      Note that all SUBREGs of MEM are paradoxical; otherwise they
8514      would have been rewritten.  */
8515   if (MEM_P (x) && GET_CODE (y) == SUBREG
8516       && MEM_P (SUBREG_REG (y))
8517       && rtx_equal_p (SUBREG_REG (y),
8518 		      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
8519     return 1;
8520 
8521   if (MEM_P (y) && GET_CODE (x) == SUBREG
8522       && MEM_P (SUBREG_REG (x))
8523       && rtx_equal_p (SUBREG_REG (x),
8524 		      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
8525     return 1;
8526 
8527   /* We used to see if get_last_value of X and Y were the same but that's
8528      not correct.  In one direction, we'll cause the assignment to have
8529      the wrong destination and in the other, we'll import a register into
8530      this insn that might already have been dead.  So fail if none of the
8531      above cases are true.  */
8532   return 0;
8533 }
8534 
8535 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
8536    Return that assignment if so.
8537 
8538    We only handle the most common cases.  */
8539 
8540 static rtx
8541 make_field_assignment (rtx x)
8542 {
8543   rtx dest = SET_DEST (x);
8544   rtx src = SET_SRC (x);
8545   rtx assign;
8546   rtx rhs, lhs;
8547   HOST_WIDE_INT c1;
8548   HOST_WIDE_INT pos;
8549   unsigned HOST_WIDE_INT len;
8550   rtx other;
8551   enum machine_mode mode;
8552 
8553   /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
8554      a clear of a one-bit field.  We will have changed it to
8555      (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
8556      for a SUBREG.  */
8557 
8558   if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
8559       && CONST_INT_P (XEXP (XEXP (src, 0), 0))
8560       && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
8561       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8562     {
8563       assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
8564 				1, 1, 1, 0);
8565       if (assign != 0)
8566 	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
8567       return x;
8568     }
8569 
8570   if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
8571       && subreg_lowpart_p (XEXP (src, 0))
8572       && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
8573 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
8574       && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
8575       && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
8576       && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
8577       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8578     {
8579       assign = make_extraction (VOIDmode, dest, 0,
8580 				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
8581 				1, 1, 1, 0);
8582       if (assign != 0)
8583 	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
8584       return x;
8585     }
8586 
8587   /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
8588      one-bit field.  */
8589   if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
8590       && XEXP (XEXP (src, 0), 0) == const1_rtx
8591       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8592     {
8593       assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
8594 				1, 1, 1, 0);
8595       if (assign != 0)
8596 	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
8597       return x;
8598     }
8599 
8600   /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
8601      SRC is an AND with all bits of that field set, then we can discard
8602      the AND.  */
8603   if (GET_CODE (dest) == ZERO_EXTRACT
8604       && CONST_INT_P (XEXP (dest, 1))
8605       && GET_CODE (src) == AND
8606       && CONST_INT_P (XEXP (src, 1)))
8607     {
8608       HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
8609       unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
8610       unsigned HOST_WIDE_INT ze_mask;
8611 
8612       if (width >= HOST_BITS_PER_WIDE_INT)
8613 	ze_mask = -1;
8614       else
8615 	ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
8616 
8617       /* Complete overlap.  We can remove the source AND.  */
8618       if ((and_mask & ze_mask) == ze_mask)
8619 	return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
8620 
8621       /* Partial overlap.  We can reduce the source AND.  */
8622       if ((and_mask & ze_mask) != and_mask)
8623 	{
8624 	  mode = GET_MODE (src);
8625 	  src = gen_rtx_AND (mode, XEXP (src, 0),
8626 			     gen_int_mode (and_mask & ze_mask, mode));
8627 	  return gen_rtx_SET (VOIDmode, dest, src);
8628 	}
8629     }
8630 
8631   /* The other case we handle is assignments into a constant-position
8632      field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
8633      a mask that has all one bits except for a group of zero bits and
8634      OTHER is known to have zeros where C1 has ones, this is such an
8635      assignment.  Compute the position and length from C1.  Shift OTHER
8636      to the appropriate position, force it to the required mode, and
8637      make the extraction.  Check for the AND in both operands.  */
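  /* Editorial example (not in the original source): for
     (set DEST (ior (and DEST 0xffffff00) OTHER)) in SImode, with
     OTHER known to be zero outside the low byte, ~C1 selects bits
     0..7, so POS = 0 and LEN = 8 and the SET is typically rewritten
     as an assignment to (zero_extract DEST 8 0) (or an equivalent
     STRICT_LOW_PART form).  */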
8638 
8639   if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
8640     return x;
8641 
8642   rhs = expand_compound_operation (XEXP (src, 0));
8643   lhs = expand_compound_operation (XEXP (src, 1));
8644 
8645   if (GET_CODE (rhs) == AND
8646       && CONST_INT_P (XEXP (rhs, 1))
8647       && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
8648     c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
8649   else if (GET_CODE (lhs) == AND
8650 	   && CONST_INT_P (XEXP (lhs, 1))
8651 	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
8652     c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
8653   else
8654     return x;
8655 
8656   pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
8657   if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
8658       || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
8659       || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
8660     return x;
8661 
8662   assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
8663   if (assign == 0)
8664     return x;
8665 
8666   /* The mode to use for the source is the mode of the assignment, or of
8667      what is inside a possible STRICT_LOW_PART.  */
8668   mode = (GET_CODE (assign) == STRICT_LOW_PART
8669 	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
8670 
8671   /* Shift OTHER right POS places and make it the source, restricting it
8672      to the proper length and mode.  */
8673 
8674   src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
8675 						     GET_MODE (src),
8676 						     other, pos),
8677 			       dest);
8678   src = force_to_mode (src, mode,
8679 		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
8680 		       ? ~(unsigned HOST_WIDE_INT) 0
8681 		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
8682 		       0);
8683 
8684   /* If SRC is masked by an AND that does not make a difference in
8685      the value being stored, strip it.  */
8686   if (GET_CODE (assign) == ZERO_EXTRACT
8687       && CONST_INT_P (XEXP (assign, 1))
8688       && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
8689       && GET_CODE (src) == AND
8690       && CONST_INT_P (XEXP (src, 1))
8691       && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
8692 	  == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
8693     src = XEXP (src, 0);
8694 
8695   return gen_rtx_SET (VOIDmode, assign, src);
8696 }
8697 
8698 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
8699    if so.  */
8700 
8701 static rtx
8702 apply_distributive_law (rtx x)
8703 {
8704   enum rtx_code code = GET_CODE (x);
8705   enum rtx_code inner_code;
8706   rtx lhs, rhs, other;
8707   rtx tem;
8708 
8709   /* Distributivity is not true for floating point as it can change the
8710      value.  So we don't do it unless -funsafe-math-optimizations.  */
8711   if (FLOAT_MODE_P (GET_MODE (x))
8712       && ! flag_unsafe_math_optimizations)
8713     return x;
8714 
8715   /* The outer operation can only be one of the following:  */
8716   if (code != IOR && code != AND && code != XOR
8717       && code != PLUS && code != MINUS)
8718     return x;
8719 
8720   lhs = XEXP (x, 0);
8721   rhs = XEXP (x, 1);
8722 
8723   /* If either operand is a primitive we can't do anything, so get out
8724      fast.  */
8725   if (OBJECT_P (lhs) || OBJECT_P (rhs))
8726     return x;
8727 
8728   lhs = expand_compound_operation (lhs);
8729   rhs = expand_compound_operation (rhs);
8730   inner_code = GET_CODE (lhs);
8731   if (inner_code != GET_CODE (rhs))
8732     return x;
8733 
8734   /* See if the inner and outer operations distribute.  */
8735   switch (inner_code)
8736     {
8737     case LSHIFTRT:
8738     case ASHIFTRT:
8739     case AND:
8740     case IOR:
8741       /* These all distribute except over PLUS and MINUS.  */
8742       if (code == PLUS || code == MINUS)
8743 	return x;
8744       break;
8745 
8746     case MULT:
8747       if (code != PLUS && code != MINUS)
8748 	return x;
8749       break;
8750 
8751     case ASHIFT:
8752       /* This is also a multiply, so it distributes over everything.  */
8753       break;
8754 
8755     case SUBREG:
8756       /* Non-paradoxical SUBREGs distribute over all operations,
8757 	 provided the inner modes and byte offsets are the same, this
8758 	 is an extraction of a low-order part, we don't convert an fp
8759 	 operation to int or vice versa, this is not a vector mode,
8760 	 and we would not be converting a single-word operation into a
8761 	 multi-word operation.  The latter test is not required, but
8762 	 it prevents generating unneeded multi-word operations.  Some
8763 	 of the previous tests are redundant given the latter test,
8764 	 but are retained because they are required for correctness.
8765 
8766 	 We produce the result slightly differently in this case.  */
8767 
8768       if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
8769 	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
8770 	  || ! subreg_lowpart_p (lhs)
8771 	  || (GET_MODE_CLASS (GET_MODE (lhs))
8772 	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
8773 	  || (GET_MODE_SIZE (GET_MODE (lhs))
8774 	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8775 	  || VECTOR_MODE_P (GET_MODE (lhs))
8776 	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
8777 	  /* Result might need to be truncated.  Don't change mode if
8778 	     explicit truncation is needed.  */
8779 	  || !TRULY_NOOP_TRUNCATION
8780 	       (GET_MODE_BITSIZE (GET_MODE (x)),
8781 		GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs)))))
8782 	return x;
8783 
8784       tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8785 				 SUBREG_REG (lhs), SUBREG_REG (rhs));
8786       return gen_lowpart (GET_MODE (x), tem);
8787 
8788     default:
8789       return x;
8790     }
8791 
8792   /* Set LHS and RHS to the inner operands (A and B in the example
8793      above) and set OTHER to the common operand (C in the example).
8794      There is only one way to do this unless the inner operation is
8795      commutative.  */
8796   if (COMMUTATIVE_ARITH_P (lhs)
8797       && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8798     other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8799   else if (COMMUTATIVE_ARITH_P (lhs)
8800 	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8801     other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8802   else if (COMMUTATIVE_ARITH_P (lhs)
8803 	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8804     other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8805   else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8806     other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8807   else
8808     return x;
8809 
8810   /* Form the new inner operation, seeing if it simplifies first.  */
8811   tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
8812 
8813   /* There is one exception to the general way of distributing:
8814      (a | c) ^ (b | c) -> (a ^ b) & ~c  */
8815   if (code == XOR && inner_code == IOR)
8816     {
8817       inner_code = AND;
8818       other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8819     }
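  /* Editorial check of the identity (not in the original source):
     where C has a 1 bit, both IORs have a 1 and the XOR yields 0,
     matching the AND with ~C; where C has a 0 bit, both sides reduce
     to A ^ B.  */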
8820 
8821   /* We may be able to continue distributing the result, so call
8822      ourselves recursively on the inner operation before forming the
8823      outer operation, which we return.  */
8824   return simplify_gen_binary (inner_code, GET_MODE (x),
8825 			      apply_distributive_law (tem), other);
8826 }
8827 
8828 /* See if X is of the form (* (+ A B) C), and if so convert to
8829    (+ (* A C) (* B C)) and try to simplify.
8830 
8831    Most of the time, this results in no change.  However, if some of
8832    the operands are the same or inverses of each other, simplifications
8833    will result.
8834 
8835    For example, (and (ior A B) (not B)) can occur as the result of
8836    expanding a bit field assignment.  When we apply the distributive
8837    law to this, we get (ior (and A (not B)) (and B (not B))),
8838    which then simplifies to (and A (not B)).
8839 
8840    Note that we do not check the validity of applying the inverse
8841    distributive law here; such a check would be pointless, since the
8842    few callers of this routine can do it themselves.
8843 
8844    N is the index of the term that is decomposed (the arithmetic operation,
8845    i.e. (+ A B) in the first example above).  !N is the index of the term that
8846    is distributed, i.e. of C in the first example above.  */
8847 static rtx
8848 distribute_and_simplify_rtx (rtx x, int n)
8849 {
8850   enum machine_mode mode;
8851   enum rtx_code outer_code, inner_code;
8852   rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
8853 
8854   /* Distributivity is not true for floating point as it can change the
8855      value.  So we don't do it unless -funsafe-math-optimizations.  */
8856   if (FLOAT_MODE_P (GET_MODE (x))
8857       && ! flag_unsafe_math_optimizations)
8858     return NULL_RTX;
8859 
8860   decomposed = XEXP (x, n);
8861   if (!ARITHMETIC_P (decomposed))
8862     return NULL_RTX;
8863 
8864   mode = GET_MODE (x);
8865   outer_code = GET_CODE (x);
8866   distributed = XEXP (x, !n);
8867 
8868   inner_code = GET_CODE (decomposed);
8869   inner_op0 = XEXP (decomposed, 0);
8870   inner_op1 = XEXP (decomposed, 1);
8871 
8872   /* Special case (and (xor B C) (not A)), which is equivalent to
8873      (xor (ior A B) (ior A C))  */
8874   if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
8875     {
8876       distributed = XEXP (distributed, 0);
8877       outer_code = IOR;
8878     }
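  /* Editorial check (not in the original source): where A is 1,
     (not A) forces the AND to 0, and both IORs are 1 so their XOR is
     0 as well; where A is 0, both sides reduce to B ^ C.  */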
8879 
8880   if (n == 0)
8881     {
8882       /* Distribute the second term.  */
8883       new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
8884       new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
8885     }
8886   else
8887     {
8888       /* Distribute the first term.  */
8889       new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
8890       new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
8891     }
8892 
8893   tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
8894 						     new_op0, new_op1));
8895   if (GET_CODE (tmp) != outer_code
8896       && rtx_cost (tmp, SET, optimize_this_for_speed_p)
8897          < rtx_cost (x, SET, optimize_this_for_speed_p))
8898     return tmp;
8899 
8900   return NULL_RTX;
8901 }
8902 
8903 /* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
8904    in MODE.  Return an equivalent form, if different from (and VAROP
8905    (const_int CONSTOP)).  Otherwise, return NULL_RTX.  */
8906 
8907 static rtx
8908 simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
8909 			  unsigned HOST_WIDE_INT constop)
8910 {
8911   unsigned HOST_WIDE_INT nonzero;
8912   unsigned HOST_WIDE_INT orig_constop;
8913   rtx orig_varop;
8914   int i;
8915 
8916   orig_varop = varop;
8917   orig_constop = constop;
8918   if (GET_CODE (varop) == CLOBBER)
8919     return NULL_RTX;
8920 
8921   /* Simplify VAROP knowing that we will only be looking at some of the
8922      bits in it.
8923 
8924      Note by passing in CONSTOP, we guarantee that the bits not set in
8925      CONSTOP are not significant and will never be examined.  We must
8926      ensure that is the case by explicitly masking out those bits
8927      before returning.  */
8928   varop = force_to_mode (varop, mode, constop, 0);
8929 
8930   /* If VAROP is a CLOBBER, we will fail so return it.  */
8931   if (GET_CODE (varop) == CLOBBER)
8932     return varop;
8933 
8934   /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8935      to VAROP and return the new constant.  */
8936   if (CONST_INT_P (varop))
8937     return gen_int_mode (INTVAL (varop) & constop, mode);
8938 
8939   /* See what bits may be nonzero in VAROP.  Unlike the general case of
8940      a call to nonzero_bits, here we don't care about bits outside
8941      MODE.  */
8942 
8943   nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8944 
8945   /* Turn off all bits in the constant that are known to already be zero.
8946      Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8947      which is tested below.  */
8948 
8949   constop &= nonzero;
8950 
8951   /* If we don't have any bits left, return zero.  */
8952   if (constop == 0)
8953     return const0_rtx;
8954 
8955   /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8956      a power of two, we can replace this with an ASHIFT.  */
8957   if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8958       && (i = exact_log2 (constop)) >= 0)
8959     return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
8960 
8961   /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8962      or XOR, then try to apply the distributive law.  This may eliminate
8963      operations if either branch can be simplified because of the AND.
8964      It may also make some cases more complex, but those cases probably
8965      won't match a pattern either with or without this.  */
8966 
8967   if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8968     return
8969       gen_lowpart
8970 	(mode,
8971 	 apply_distributive_law
8972 	 (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
8973 			       simplify_and_const_int (NULL_RTX,
8974 						       GET_MODE (varop),
8975 						       XEXP (varop, 0),
8976 						       constop),
8977 			       simplify_and_const_int (NULL_RTX,
8978 						       GET_MODE (varop),
8979 						       XEXP (varop, 1),
8980 						       constop))));
8981 
8982   /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8983      the AND and see if one of the operands simplifies to zero.  If so, we
8984      may eliminate it.  */
8985 
8986   if (GET_CODE (varop) == PLUS
8987       && exact_log2 (constop + 1) >= 0)
8988     {
8989       rtx o0, o1;
8990 
8991       o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8992       o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8993       if (o0 == const0_rtx)
8994 	return o1;
8995       if (o1 == const0_rtx)
8996 	return o0;
8997     }
8998 
8999   /* Make a SUBREG if necessary.  If we can't make it, fail.  */
9000   varop = gen_lowpart (mode, varop);
9001   if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
9002     return NULL_RTX;
9003 
9004   /* If we are only masking insignificant bits, return VAROP.  */
9005   if (constop == nonzero)
9006     return varop;
9007 
9008   if (varop == orig_varop && constop == orig_constop)
9009     return NULL_RTX;
9010 
9011   /* Otherwise, return an AND.  */
9012   return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
9013 }
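
/* A worked instance of the NEG rule above (values illustrative): if
   VAROP is (neg X) with nonzero_bits (X, mode) == 1, then X is 0 or 1
   and (neg X) is 0 or all ones.  ANDing with CONSTOP == 8, a power of
   two with exact_log2 == 3, therefore yields 0 or 8, which is exactly
   (ashift X 3), the replacement built via simplify_shift_const.  */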
9014 
9015 
9016 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
9017    in MODE.
9018 
9019    Return an equivalent form, if different from X.  Otherwise, return X.  If
9020    X is zero, we are to always construct the equivalent form.  */
9021 
9022 static rtx
9023 simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
9024 			unsigned HOST_WIDE_INT constop)
9025 {
9026   rtx tem = simplify_and_const_int_1 (mode, varop, constop);
9027   if (tem)
9028     return tem;
9029 
9030   if (!x)
9031     x = simplify_gen_binary (AND, GET_MODE (varop), varop,
9032 			     gen_int_mode (constop, mode));
9033   if (GET_MODE (x) != mode)
9034     x = gen_lowpart (mode, x);
9035   return x;
9036 }
9037 
9038 /* Given a REG, X, compute which bits in X can be nonzero.
9039    We don't care about bits outside of those defined in MODE.
9040 
9041    For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
9042    a shift, AND, or zero_extract, we can do better.  */
9043 
9044 static rtx
9045 reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
9046 			      const_rtx known_x ATTRIBUTE_UNUSED,
9047 			      enum machine_mode known_mode ATTRIBUTE_UNUSED,
9048 			      unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
9049 			      unsigned HOST_WIDE_INT *nonzero)
9050 {
9051   rtx tem;
9052   reg_stat_type *rsp;
9053 
9054   /* If X is a register whose nonzero bits value is current, use it.
9055      Otherwise, if X is a register whose value we can find, use that
9056      value.  Otherwise, use the previously-computed global nonzero bits
9057      for this register.  */
9058 
9059   rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
9060   if (rsp->last_set_value != 0
9061       && (rsp->last_set_mode == mode
9062 	  || (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
9063 	      && GET_MODE_CLASS (mode) == MODE_INT))
9064       && ((rsp->last_set_label >= label_tick_ebb_start
9065 	   && rsp->last_set_label < label_tick)
9066 	  || (rsp->last_set_label == label_tick
9067               && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
9068 	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
9069 	      && REG_N_SETS (REGNO (x)) == 1
9070 	      && !REGNO_REG_SET_P
9071 	          (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
9072     {
9073       *nonzero &= rsp->last_set_nonzero_bits;
9074       return NULL;
9075     }
9076 
9077   tem = get_last_value (x);
9078 
9079   if (tem)
9080     {
9081 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
9082       /* If X is narrower than MODE and TEM is a non-negative
9083 	 constant that would appear negative in the mode of X,
9084 	 sign-extend it for use in reg_nonzero_bits because some
9085 	 machines (maybe most) will actually do the sign-extension
9086 	 and this is the conservative approach.
9087 
9088 	 ??? For 2.5, try to tighten up the MD files in this regard
9089 	 instead of this kludge.  */
9090 
9091       if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
9092 	  && CONST_INT_P (tem)
9093 	  && INTVAL (tem) > 0
9094 	  && 0 != (INTVAL (tem)
9095 		   & ((HOST_WIDE_INT) 1
9096 		      << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9097 	tem = GEN_INT (INTVAL (tem)
9098 		       | ((HOST_WIDE_INT) (-1)
9099 			  << GET_MODE_BITSIZE (GET_MODE (x))));
9100 #endif
9101       return tem;
9102     }
9103   else if (nonzero_sign_valid && rsp->nonzero_bits)
9104     {
9105       unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
9106 
9107       if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
9108 	/* We don't know anything about the upper bits.  */
9109 	mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
9110       *nonzero &= mask;
9111     }
9112 
9113   return NULL;
9114 }
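
/* As an example of the SHORT_IMMEDIATES_SIGN_EXTEND kludge above
   (numbers hypothetical): if X is an HImode register whose last value
   TEM is (const_int 0x8000), that constant is positive as a host
   integer but has the HImode sign bit set.  With a 32-bit MODE, TEM is
   rewritten as (const_int 0xffff8000), i.e. ORed with
   (HOST_WIDE_INT) -1 << 16, matching what a machine that sign-extends
   short immediates would compute.  */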
9115 
9116 /* Return the number of bits at the high-order end of X that are known to
9117    be equal to the sign bit.  X will be used in mode MODE; if MODE is
9118    VOIDmode, X will be used in its own mode.  The returned value will always
9119    be between 1 and the number of bits in MODE.  */
9120 
9121 static rtx
9122 reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
9123 				     const_rtx known_x ATTRIBUTE_UNUSED,
9124 				     enum machine_mode known_mode
9125 				     ATTRIBUTE_UNUSED,
9126 				     unsigned int known_ret ATTRIBUTE_UNUSED,
9127 				     unsigned int *result)
9128 {
9129   rtx tem;
9130   reg_stat_type *rsp;
9131 
9132   rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
9133   if (rsp->last_set_value != 0
9134       && rsp->last_set_mode == mode
9135       && ((rsp->last_set_label >= label_tick_ebb_start
9136 	   && rsp->last_set_label < label_tick)
9137 	  || (rsp->last_set_label == label_tick
9138               && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
9139 	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
9140 	      && REG_N_SETS (REGNO (x)) == 1
9141 	      && !REGNO_REG_SET_P
9142 	          (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
9143     {
9144       *result = rsp->last_set_sign_bit_copies;
9145       return NULL;
9146     }
9147 
9148   tem = get_last_value (x);
9149   if (tem != 0)
9150     return tem;
9151 
9152   if (nonzero_sign_valid && rsp->sign_bit_copies != 0
9153       && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
9154     *result = rsp->sign_bit_copies;
9155 
9156   return NULL;
9157 }
9158 
9159 /* Return the number of "extended" bits there are in X, when interpreted
9160    as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
9161    unsigned quantities, this is the number of high-order zero bits.
9162    For signed quantities, this is the number of copies of the sign bit
9163    minus 1.  In both cases, this function returns the number of "spare"
9164    bits.  For example, if two quantities for which this function returns
9165    at least 1 are added, the addition is known not to overflow.
9166 
9167    This function will always return 0 unless called during combine, which
9168    implies that it must be called from a define_split.  */
9169 
9170 unsigned int
9171 extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
9172 {
9173   if (nonzero_sign_valid == 0)
9174     return 0;
9175 
9176   return (unsignedp
9177 	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9178 	     ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
9179 			       - floor_log2 (nonzero_bits (x, mode)))
9180 	     : 0)
9181 	  : num_sign_bit_copies (x, mode) - 1);
9182 }
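
/* For instance (values illustrative): for an unsigned X in SImode with
   nonzero_bits (X, SImode) == 0xff, floor_log2 gives 7, so the result
   is 32 - 1 - 7 == 24 spare high-order zero bits.  Two such quantities
   can be added without overflow, since each is at most 0xff and their
   sum fits in 9 bits.  */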
9183 
9184 /* This function is called from `simplify_shift_const' to merge two
9185    outer operations.  Specifically, we have already found that we need
9186    to perform operation *POP0 with constant *PCONST0 at the outermost
9187    position.  We would now like to also perform OP1 with constant CONST1
9188    (with *POP0 being done last).
9189 
9190    Return 1 if we can do the operation and update *POP0 and *PCONST0 with
9191    the resulting operation.  *PCOMP_P is set to 1 if we would need to
9192    complement the innermost operand, otherwise it is unchanged.
9193 
9194    MODE is the mode in which the operation will be done.  No bits outside
9195    the width of this mode matter.  It is assumed that the width of this mode
9196    is smaller than or equal to HOST_BITS_PER_WIDE_INT.
9197 
9198    If *POP0 or OP1 is UNKNOWN, no operation is required.  Only NEG, PLUS,
9199    IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
9200    result is simply *PCONST0.
9201 
9202    If the resulting operation cannot be expressed as one operation, we
9203    return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
9204 
9205 static int
9206 merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0,
		 enum rtx_code op1, HOST_WIDE_INT const1,
		 enum machine_mode mode, int *pcomp_p)
9207 {
9208   enum rtx_code op0 = *pop0;
9209   HOST_WIDE_INT const0 = *pconst0;
9210 
9211   const0 &= GET_MODE_MASK (mode);
9212   const1 &= GET_MODE_MASK (mode);
9213 
9214   /* If OP0 is an AND, clear unimportant bits in CONST1.  */
9215   if (op0 == AND)
9216     const1 &= const0;
9217 
9218   /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
9219      if OP0 is SET.  */
9220 
9221   if (op1 == UNKNOWN || op0 == SET)
9222     return 1;
9223 
9224   else if (op0 == UNKNOWN)
9225     op0 = op1, const0 = const1;
9226 
9227   else if (op0 == op1)
9228     {
9229       switch (op0)
9230 	{
9231 	case AND:
9232 	  const0 &= const1;
9233 	  break;
9234 	case IOR:
9235 	  const0 |= const1;
9236 	  break;
9237 	case XOR:
9238 	  const0 ^= const1;
9239 	  break;
9240 	case PLUS:
9241 	  const0 += const1;
9242 	  break;
9243 	case NEG:
9244 	  op0 = UNKNOWN;
9245 	  break;
9246 	default:
9247 	  break;
9248 	}
9249     }
9250 
9251   /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
9252   else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9253     return 0;
9254 
9255   /* If the two constants aren't the same, we can't do anything.  The
9256      remaining six cases can all be done.  */
9257   else if (const0 != const1)
9258     return 0;
9259 
9260   else
9261     switch (op0)
9262       {
9263       case IOR:
9264 	if (op1 == AND)
9265 	  /* (a & b) | b == b */
9266 	  op0 = SET;
9267 	else /* op1 == XOR */
9268 	  /* (a ^ b) | b == a | b */
9269 	  {;}
9270 	break;
9271 
9272       case XOR:
9273 	if (op1 == AND)
9274 	  /* (a & b) ^ b == (~a) & b */
9275 	  op0 = AND, *pcomp_p = 1;
9276 	else /* op1 == IOR */
9277 	  /* (a | b) ^ b == a & ~b */
9278 	  op0 = AND, const0 = ~const0;
9279 	break;
9280 
9281       case AND:
9282 	if (op1 == IOR)
9283 	  /* (a | b) & b == b */
9284 	  op0 = SET;
9285 	else /* op1 == XOR */
9286 	  /* (a ^ b) & b == (~a) & b */
9287 	  *pcomp_p = 1;
9288 	break;
9289       default:
9290 	break;
9291       }
9292 
9293   /* Check for NO-OP cases.  */
9294   const0 &= GET_MODE_MASK (mode);
9295   if (const0 == 0
9296       && (op0 == IOR || op0 == XOR || op0 == PLUS))
9297     op0 = UNKNOWN;
9298   else if (const0 == 0 && op0 == AND)
9299     op0 = SET;
9300   else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9301 	   && op0 == AND)
9302     op0 = UNKNOWN;
9303 
9304   *pop0 = op0;
9305 
9306   /* ??? Slightly redundant with the above mask, but not entirely.
9307      Moving this above means we'd have to sign-extend the mode mask
9308      for the final test.  */
9309   if (op0 != UNKNOWN && op0 != NEG)
9310     *pconst0 = trunc_int_for_mode (const0, mode);
9311 
9312   return 1;
9313 }
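
/* Two illustrative merges (constants hypothetical): if both operations
   are IOR, the constants are simply ORed together, so an inner
   (ior ... 0x0f) under an outer (ior ... 0xf0) leaves *POP0 == IOR with
   *PCONST0 == 0xff.  If the inner operation is AND and the outer is XOR
   with the same constant, the identity (a & b) ^ b == (~a) & b from the
   table above applies: *POP0 becomes AND and *PCOMP_P is set, so the
   caller must complement the innermost operand.  */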
9314 
9315 /* A helper to simplify_shift_const_1 to determine the mode we can perform
9316    the shift in.  The original shift operation CODE is performed on OP in
9317    ORIG_MODE.  Return the wider mode MODE if we can perform the operation
9318    in that mode.  Return ORIG_MODE otherwise.  We can also assume that the
9319    result of the shift is subject to operation OUTER_CODE with operand
9320    OUTER_CONST.  */
9321 
9322 static enum machine_mode
9323 try_widen_shift_mode (enum rtx_code code, rtx op, int count,
9324 		      enum machine_mode orig_mode, enum machine_mode mode,
9325 		      enum rtx_code outer_code, HOST_WIDE_INT outer_const)
9326 {
9327   if (orig_mode == mode)
9328     return mode;
9329   gcc_assert (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (orig_mode));
9330 
9331   /* In general we can't perform the shift in a wider mode for right shifts and rotates.  */
9332   switch (code)
9333     {
9334     case ASHIFTRT:
9335       /* We can still widen if the bits brought in from the left are identical
9336 	 to the sign bit of ORIG_MODE.  */
9337       if (num_sign_bit_copies (op, mode)
9338 	  > (unsigned) (GET_MODE_BITSIZE (mode)
9339 			- GET_MODE_BITSIZE (orig_mode)))
9340 	return mode;
9341       return orig_mode;
9342 
9343     case LSHIFTRT:
9344       /* Similarly here but with zero bits.  */
9345       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9346 	  && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
9347 	return mode;
9348 
9349       /* We can also widen if the bits brought in will be masked off.  This
9350 	 operation is performed in ORIG_MODE.  */
9351       if (outer_code == AND)
9352 	{
9353 	  int care_bits = low_bitmask_len (orig_mode, outer_const);
9354 
9355 	  if (care_bits >= 0
9356 	      && GET_MODE_BITSIZE (orig_mode) - care_bits >= count)
9357 	    return mode;
9358 	}
9359       /* fall through */
9360 
9361     case ROTATE:
9362       return orig_mode;
9363 
9364     case ROTATERT:
9365       gcc_unreachable ();
9366 
9367     default:
9368       return mode;
9369     }
9370 }
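
/* Example of the masked LSHIFTRT case (modes and constants
   illustrative): widening a QImode right shift by COUNT == 2 to SImode
   is allowed under an outer (and ... 0x3f) performed in QImode, because
   low_bitmask_len gives CARE_BITS == 6 and the two QImode bits the
   wider shift could bring in (8 - 6 == 2 >= COUNT) are masked off
   anyway.  */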
9371 
9372 /* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
9373    The result of the shift is RESULT_MODE.  Return NULL_RTX if we cannot
9374    simplify it.  Otherwise, return a simplified value.
9375 
9376    The shift is normally computed in the widest mode we find in VAROP, as
9377    long as it isn't a different number of words than RESULT_MODE.  Exceptions
9378    are ASHIFTRT and ROTATE, which are always done in their original mode.  */
9379 
9380 static rtx
9381 simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
9382 			rtx varop, int orig_count)
9383 {
9384   enum rtx_code orig_code = code;
9385   rtx orig_varop = varop;
9386   int count;
9387   enum machine_mode mode = result_mode;
9388   enum machine_mode shift_mode, tmode;
9389   unsigned int mode_words
9390     = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9391   /* We form (outer_op (code varop count) (outer_const)).  */
9392   enum rtx_code outer_op = UNKNOWN;
9393   HOST_WIDE_INT outer_const = 0;
9394   int complement_p = 0;
9395   rtx new_rtx, x;
9396 
9397   /* Make sure to truncate the "natural" shift count on the way in.  We don't
9398      want to do this inside the loop as it makes it more difficult to
9399      combine shifts.  */
9400   if (SHIFT_COUNT_TRUNCATED)
9401     orig_count &= GET_MODE_BITSIZE (mode) - 1;
9402 
9403   /* If we were given an invalid count, don't do anything except exactly
9404      what was requested.  */
9405 
9406   if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9407     return NULL_RTX;
9408 
9409   count = orig_count;
9410 
9411   /* Unless one of the branches of the `if' in this loop does a `continue',
9412      we will `break' the loop after the `if'.  */
9413 
9414   while (count != 0)
9415     {
9416       /* If we have an operand of (clobber (const_int 0)), fail.  */
9417       if (GET_CODE (varop) == CLOBBER)
9418 	return NULL_RTX;
9419 
9420       /* Convert ROTATERT to ROTATE.  */
9421       if (code == ROTATERT)
9422 	{
9423 	  unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9424 	  code = ROTATE;
9425 	  if (VECTOR_MODE_P (result_mode))
9426 	    count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9427 	  else
9428 	    count = bitsize - count;
9429 	}
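
      /* E.g. a ROTATERT by 3 in a 32-bit scalar mode becomes a ROTATE
	 by 29; for a vector mode the rotation width is that of a single
	 element, hence the division by GET_MODE_NUNITS.  */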
9430 
9431       shift_mode = try_widen_shift_mode (code, varop, count, result_mode,
9432 					 mode, outer_op, outer_const);
9433 
9434       /* Handle cases where the count is greater than the size of the mode
9435 	 minus 1.  For ASHIFT, use the size minus one as the count (this can
9436 	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
9437 	 take the count modulo the size.  For other shifts, the result is
9438 	 zero.
9439 
9440 	 Since these shifts are being produced by the compiler by combining
9441 	 multiple operations, each of which is defined, we know what the
9442 	 result is supposed to be.  */
9443 
9444       if (count > (GET_MODE_BITSIZE (shift_mode) - 1))
9445 	{
9446 	  if (code == ASHIFTRT)
9447 	    count = GET_MODE_BITSIZE (shift_mode) - 1;
9448 	  else if (code == ROTATE || code == ROTATERT)
9449 	    count %= GET_MODE_BITSIZE (shift_mode);
9450 	  else
9451 	    {
9452 	      /* We can't simply return zero because there may be an
9453 		 outer op.  */
9454 	      varop = const0_rtx;
9455 	      count = 0;
9456 	      break;
9457 	    }
9458 	}
9459 
9460       /* If we discovered we had to complement VAROP, leave.  Making a NOT
9461 	 here would cause an infinite loop.  */
9462       if (complement_p)
9463 	break;
9464 
9465       /* An arithmetic right shift of a quantity known to be -1 or 0
9466 	 is a no-op.  */
9467       if (code == ASHIFTRT
9468 	  && (num_sign_bit_copies (varop, shift_mode)
9469 	      == GET_MODE_BITSIZE (shift_mode)))
9470 	{
9471 	  count = 0;
9472 	  break;
9473 	}
9474 
9475       /* If we are doing an arithmetic right shift and discarding all but
9476 	 the sign bit copies, this is equivalent to doing a shift by the
9477 	 bitsize minus one.  Convert it into that shift because it will often
9478 	 allow other simplifications.  */
9479 
9480       if (code == ASHIFTRT
9481 	  && (count + num_sign_bit_copies (varop, shift_mode)
9482 	      >= GET_MODE_BITSIZE (shift_mode)))
9483 	count = GET_MODE_BITSIZE (shift_mode) - 1;
9484 
9485       /* We simplify the tests below and elsewhere by converting
9486 	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9487 	 `make_compound_operation' will convert it to an ASHIFTRT for
9488 	 those machines (such as VAX) that don't have an LSHIFTRT.  */
9489       if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9490 	  && code == ASHIFTRT
9491 	  && ((nonzero_bits (varop, shift_mode)
9492 	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9493 	      == 0))
9494 	code = LSHIFTRT;
9495 
9496       if (((code == LSHIFTRT
9497 	    && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9498 	    && !(nonzero_bits (varop, shift_mode) >> count))
9499 	   || (code == ASHIFT
9500 	       && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9501 	       && !((nonzero_bits (varop, shift_mode) << count)
9502 		    & GET_MODE_MASK (shift_mode))))
9503 	  && !side_effects_p (varop))
9504 	varop = const0_rtx;
9505 
9506       switch (GET_CODE (varop))
9507 	{
9508 	case SIGN_EXTEND:
9509 	case ZERO_EXTEND:
9510 	case SIGN_EXTRACT:
9511 	case ZERO_EXTRACT:
9512 	  new_rtx = expand_compound_operation (varop);
9513 	  if (new_rtx != varop)
9514 	    {
9515 	      varop = new_rtx;
9516 	      continue;
9517 	    }
9518 	  break;
9519 
9520 	case MEM:
9521 	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9522 	     minus the width of a smaller mode, we can do this with a
9523 	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
9524 	  if ((code == ASHIFTRT || code == LSHIFTRT)
9525 	      && ! mode_dependent_address_p (XEXP (varop, 0))
9526 	      && ! MEM_VOLATILE_P (varop)
9527 	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9528 					 MODE_INT, 1)) != BLKmode)
9529 	    {
9530 	      new_rtx = adjust_address_nv (varop, tmode,
9531 				       BYTES_BIG_ENDIAN ? 0
9532 				       : count / BITS_PER_UNIT);
9533 
9534 	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9535 				     : ZERO_EXTEND, mode, new_rtx);
9536 	      count = 0;
9537 	      continue;
9538 	    }
9539 	  break;
9540 
9541 	case SUBREG:
9542 	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
9543 	     the same number of words as what we've seen so far.  Then store
9544 	     the widest mode in MODE.  */
9545 	  if (subreg_lowpart_p (varop)
9546 	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9547 		  > GET_MODE_SIZE (GET_MODE (varop)))
9548 	      && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9549 				  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9550 		 == mode_words
9551 	      && GET_MODE_CLASS (GET_MODE (varop)) == MODE_INT
9552 	      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (varop))) == MODE_INT)
9553 	    {
9554 	      varop = SUBREG_REG (varop);
9555 	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9556 		mode = GET_MODE (varop);
9557 	      continue;
9558 	    }
9559 	  break;
9560 
9561 	case MULT:
9562 	  /* Some machines use MULT instead of ASHIFT because MULT
9563 	     is cheaper.  But it is still better on those machines to
9564 	     merge two shifts into one.  */
9565 	  if (CONST_INT_P (XEXP (varop, 1))
9566 	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9567 	    {
9568 	      varop
9569 		= simplify_gen_binary (ASHIFT, GET_MODE (varop),
9570 				       XEXP (varop, 0),
9571 				       GEN_INT (exact_log2 (
9572 						INTVAL (XEXP (varop, 1)))));
9573 	      continue;
9574 	    }
9575 	  break;
9576 
9577 	case UDIV:
9578 	  /* Similar, for when divides are cheaper.  */
9579 	  if (CONST_INT_P (XEXP (varop, 1))
9580 	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9581 	    {
9582 	      varop
9583 		= simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
9584 				       XEXP (varop, 0),
9585 				       GEN_INT (exact_log2 (
9586 						INTVAL (XEXP (varop, 1)))));
9587 	      continue;
9588 	    }
9589 	  break;
9590 
9591 	case ASHIFTRT:
9592 	  /* If we are extracting just the sign bit of an arithmetic
9593 	     right shift, that shift is not needed.  However, the sign
9594 	     bit of a wider mode may be different from what would be
9595 	     interpreted as the sign bit in a narrower mode, so, if
9596 	     the result is narrower, don't discard the shift.  */
9597 	  if (code == LSHIFTRT
9598 	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
9599 	      && (GET_MODE_BITSIZE (result_mode)
9600 		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
9601 	    {
9602 	      varop = XEXP (varop, 0);
9603 	      continue;
9604 	    }
9605 
9606 	  /* ... fall through ...  */
9607 
9608 	case LSHIFTRT:
9609 	case ASHIFT:
9610 	case ROTATE:
9611 	  /* Here we have two nested shifts.  The result is usually the
9612 	     AND of a new shift with a mask.  We compute the result below.  */
9613 	  if (CONST_INT_P (XEXP (varop, 1))
9614 	      && INTVAL (XEXP (varop, 1)) >= 0
9615 	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9616 	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9617 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9618 	      && !VECTOR_MODE_P (result_mode))
9619 	    {
9620 	      enum rtx_code first_code = GET_CODE (varop);
9621 	      unsigned int first_count = INTVAL (XEXP (varop, 1));
9622 	      unsigned HOST_WIDE_INT mask;
9623 	      rtx mask_rtx;
9624 
9625 	      /* We have one common special case.  We can't do any merging if
9626 		 the inner code is an ASHIFTRT of a smaller mode.  However, if
9627 		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9628 		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9629 		 we can convert it to
9630 		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9631 		 This simplifies certain SIGN_EXTEND operations.  */
9632 	      if (code == ASHIFT && first_code == ASHIFTRT
9633 		  && count == (GET_MODE_BITSIZE (result_mode)
9634 			       - GET_MODE_BITSIZE (GET_MODE (varop))))
9635 		{
9636 		  /* C3 has the low-order C1 bits zero.  */
9637 
9638 		  mask = (GET_MODE_MASK (mode)
9639 			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9640 
9641 		  varop = simplify_and_const_int (NULL_RTX, result_mode,
9642 						  XEXP (varop, 0), mask);
9643 		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9644 						varop, count);
9645 		  count = first_count;
9646 		  code = ASHIFTRT;
9647 		  continue;
9648 		}
9649 
9650 	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9651 		 than C1 high-order bits equal to the sign bit, we can convert
9652 		 this to either an ASHIFT or an ASHIFTRT depending on the
9653 		 two counts.
9654 
9655 		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
9656 
9657 	      if (code == ASHIFTRT && first_code == ASHIFT
9658 		  && GET_MODE (varop) == shift_mode
9659 		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9660 		      > first_count))
9661 		{
9662 		  varop = XEXP (varop, 0);
9663 		  count -= first_count;
9664 		  if (count < 0)
9665 		    {
9666 		      count = -count;
9667 		      code = ASHIFT;
9668 		    }
9669 
9670 		  continue;
9671 		}
9672 
9673 	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
9674 		 we can only do this if FIRST_CODE is also ASHIFTRT.
9675 
9676 		 We can't do the case when CODE is ROTATE and FIRST_CODE is
9677 		 ASHIFTRT.
9678 
9679 		 If the mode of this shift is not the mode of the outer shift,
9680 		 we can't do this if either shift is a right shift or ROTATE.
9681 
9682 		 Finally, we can't do any of these if the mode is too wide
9683 		 unless the codes are the same.
9684 
9685 		 Handle the case where the shift codes are the same
9686 		 first.  */
9687 
9688 	      if (code == first_code)
9689 		{
9690 		  if (GET_MODE (varop) != result_mode
9691 		      && (code == ASHIFTRT || code == LSHIFTRT
9692 			  || code == ROTATE))
9693 		    break;
9694 
9695 		  count += first_count;
9696 		  varop = XEXP (varop, 0);
9697 		  continue;
9698 		}
9699 
9700 	      if (code == ASHIFTRT
9701 		  || (code == ROTATE && first_code == ASHIFTRT)
9702 		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9703 		  || (GET_MODE (varop) != result_mode
9704 		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
9705 			  || first_code == ROTATE
9706 			  || code == ROTATE)))
9707 		break;
9708 
9709 	      /* To compute the mask to apply after the shift, shift the
9710 		 nonzero bits of the inner shift the same way the
9711 		 outer shift will.  */
9712 
9713 	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9714 
9715 	      mask_rtx
9716 		= simplify_const_binary_operation (code, result_mode, mask_rtx,
9717 						   GEN_INT (count));
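
	      /* For example, in the classic zero-extension idiom
		 (lshiftrt:SI (ashift:SI X 24) 24) (values illustrative),
		 the inner shift has nonzero bits 0xff000000, and shifting
		 that mask right by 24 gives 0xff.  The shifts run in
		 opposite directions, so the counts cancel below
		 (24 - 24 == 0) and the whole thing becomes
		 (and:SI X 0xff).  */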
9718 
9719 	      /* Give up if we can't compute an outer operation to use.  */
9720 	      if (mask_rtx == 0
9721 		  || !CONST_INT_P (mask_rtx)
9722 		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
9723 					INTVAL (mask_rtx),
9724 					result_mode, &complement_p))
9725 		break;
9726 
9727 	      /* If the shifts are in the same direction, we add the
9728 		 counts.  Otherwise, we subtract them.  */
9729 	      if ((code == ASHIFTRT || code == LSHIFTRT)
9730 		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9731 		count += first_count;
9732 	      else
9733 		count -= first_count;
9734 
9735 	      /* If COUNT is positive, the new shift is usually CODE,
9736 		 except for the two exceptions below, in which case it is
9737 		 FIRST_CODE.  If the count is negative, FIRST_CODE should
9738 		 always be used.  */
9739 	      if (count > 0
9740 		  && ((first_code == ROTATE && code == ASHIFT)
9741 		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
9742 		code = first_code;
9743 	      else if (count < 0)
9744 		code = first_code, count = -count;
9745 
9746 	      varop = XEXP (varop, 0);
9747 	      continue;
9748 	    }
9749 
9750 	  /* If we have (A << B << C) for any shift, we can convert this to
9751 	     (A << C << B).  This wins if A is a constant.  Only try this if
9752 	     B is not a constant.  */
9753 
9754 	  else if (GET_CODE (varop) == code
9755 		   && CONST_INT_P (XEXP (varop, 0))
9756 		   && !CONST_INT_P (XEXP (varop, 1)))
9757 	    {
9758 	      rtx new_rtx = simplify_const_binary_operation (code, mode,
9759 							 XEXP (varop, 0),
9760 							 GEN_INT (count));
9761 	      varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1));
9762 	      count = 0;
9763 	      continue;
9764 	    }
9765 	  break;
9766 
9767 	case NOT:
9768 	  if (VECTOR_MODE_P (mode))
9769 	    break;
9770 
9771 	  /* Make this fit the case below.  */
9772 	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9773 			       GEN_INT (GET_MODE_MASK (mode)));
9774 	  continue;
9775 
9776 	case IOR:
9777 	case AND:
9778 	case XOR:
9779 	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9780 	     with C the size of VAROP - 1 and the shift is logical if
9781 	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9782 	     we have an (le X 0) operation.  If we have an arithmetic shift
9783 	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
9784 	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
9785 
9786 	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9787 	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9788 	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9789 	      && (code == LSHIFTRT || code == ASHIFTRT)
9790 	      && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9791 	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9792 	    {
9793 	      count = 0;
9794 	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9795 				  const0_rtx);
9796 
9797 	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9798 		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9799 
9800 	      continue;
9801 	    }
9802 
9803 	  /* If we have (shift (logical)), move the logical to the outside
9804 	     to allow it to possibly combine with another logical and the
9805 	     shift to combine with another shift.  This also canonicalizes to
9806 	     what a ZERO_EXTRACT looks like.  Also, some machines have
9807 	     (and (shift)) insns.  */
9808 
9809 	  if (CONST_INT_P (XEXP (varop, 1))
9810 	      /* We can't do this if we have (ashiftrt (xor))  and the
9811 		 constant has its sign bit set in shift_mode.  */
9812 	      && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9813 		   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9814 					      shift_mode))
9815 	      && (new_rtx = simplify_const_binary_operation (code, result_mode,
9816 							 XEXP (varop, 1),
9817 							 GEN_INT (count))) != 0
9818 	      && CONST_INT_P (new_rtx)
9819 	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9820 				  INTVAL (new_rtx), result_mode, &complement_p))
9821 	    {
9822 	      varop = XEXP (varop, 0);
9823 	      continue;
9824 	    }
9825 
9826 	  /* If we can't do that, try to simplify the shift in each arm of the
9827 	     logical expression, make a new logical expression, and apply
9828 	     the inverse distributive law.  This also can't be done
9829 	     for some (ashiftrt (xor)).  */
9830 	  if (CONST_INT_P (XEXP (varop, 1))
9831 	     && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9832 		  && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9833 					     shift_mode)))
9834 	    {
9835 	      rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9836 					      XEXP (varop, 0), count);
9837 	      rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9838 					      XEXP (varop, 1), count);
9839 
9840 	      varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
9841 					   lhs, rhs);
9842 	      varop = apply_distributive_law (varop);
9843 
9844 	      count = 0;
9845 	      continue;
9846 	    }
9847 	  break;
9848 
9849 	case EQ:
9850 	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9851 	     says that the sign bit can be tested, FOO has mode MODE, C is
9852 	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9853 	     that may be nonzero.  */
9854 	  if (code == LSHIFTRT
9855 	      && XEXP (varop, 1) == const0_rtx
9856 	      && GET_MODE (XEXP (varop, 0)) == result_mode
9857 	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
9858 	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9859 	      && STORE_FLAG_VALUE == -1
9860 	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9861 	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9862 				  (HOST_WIDE_INT) 1, result_mode,
9863 				  &complement_p))
9864 	    {
9865 	      varop = XEXP (varop, 0);
9866 	      count = 0;
9867 	      continue;
9868 	    }
9869 	  break;
9870 
9871 	case NEG:
9872 	  /* (lshiftrt (neg A) C), where A is either 0 or 1 and C is one less
9873 	     than the number of bits in the mode, is equivalent to A.  */
9874 	  if (code == LSHIFTRT
9875 	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
9876 	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9877 	    {
9878 	      varop = XEXP (varop, 0);
9879 	      count = 0;
9880 	      continue;
9881 	    }
9882 
9883 	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
9884 	     NEG outside to allow shifts to combine.  */
9885 	  if (code == ASHIFT
9886 	      && merge_outer_ops (&outer_op, &outer_const, NEG,
9887 				  (HOST_WIDE_INT) 0, result_mode,
9888 				  &complement_p))
9889 	    {
9890 	      varop = XEXP (varop, 0);
9891 	      continue;
9892 	    }
9893 	  break;
9894 
9895 	case PLUS:
9896 	  /* (lshiftrt (plus A -1) C), where A is either 0 or 1 and C
9897 	     is one less than the number of bits in the mode, is
9898 	     equivalent to (xor A 1).  */
9899 	  if (code == LSHIFTRT
9900 	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
9901 	      && XEXP (varop, 1) == constm1_rtx
9902 	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9903 	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9904 				  (HOST_WIDE_INT) 1, result_mode,
9905 				  &complement_p))
9906 	    {
9907 	      count = 0;
9908 	      varop = XEXP (varop, 0);
9909 	      continue;
9910 	    }
9911 
9912 	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9913 	     that might be nonzero in BAR are those being shifted out and those
9914 	     bits are known zero in FOO, we can replace the PLUS with FOO.
9915 	     Similarly in the other operand order.  This code occurs when
9916 	     we are computing the size of a variable-size array.  */
9917 
9918 	  if ((code == ASHIFTRT || code == LSHIFTRT)
9919 	      && count < HOST_BITS_PER_WIDE_INT
9920 	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9921 	      && (nonzero_bits (XEXP (varop, 1), result_mode)
9922 		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9923 	    {
9924 	      varop = XEXP (varop, 0);
9925 	      continue;
9926 	    }
9927 	  else if ((code == ASHIFTRT || code == LSHIFTRT)
9928 		   && count < HOST_BITS_PER_WIDE_INT
9929 		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9930 		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9931 			    >> count)
9932 		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9933 			    & nonzero_bits (XEXP (varop, 1),
9934 						 result_mode)))
9935 	    {
9936 	      varop = XEXP (varop, 1);
9937 	      continue;
9938 	    }
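
	  /* Concretely (values illustrative): for
	     (lshiftrt:SI (plus FOO BAR) 2) where BAR's nonzero bits lie
	     within the low two bits and FOO's low two bits are known
	     zero, the addition cannot produce a carry and everything BAR
	     contributes is shifted out, so the PLUS may be replaced by
	     FOO alone.  */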
9939 
9940 	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9941 	  if (code == ASHIFT
9942 	      && CONST_INT_P (XEXP (varop, 1))
9943 	      && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode,
9944 							 XEXP (varop, 1),
9945 							 GEN_INT (count))) != 0
9946 	      && CONST_INT_P (new_rtx)
9947 	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
9948 				  INTVAL (new_rtx), result_mode, &complement_p))
9949 	    {
9950 	      varop = XEXP (varop, 0);
9951 	      continue;
9952 	    }
9953 
9954 	  /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9955 	     signbit', and attempt to change the PLUS to an XOR and move it to
9956 	     the outer operation as is done above in the AND/IOR/XOR case
9957 	     for (shift (logical)).  See the logical handling above for the
9958 	     reasoning behind doing so.  */
9959 	  if (code == LSHIFTRT
9960 	      && CONST_INT_P (XEXP (varop, 1))
9961 	      && mode_signbit_p (result_mode, XEXP (varop, 1))
9962 	      && (new_rtx = simplify_const_binary_operation (code, result_mode,
9963 							 XEXP (varop, 1),
9964 							 GEN_INT (count))) != 0
9965 	      && CONST_INT_P (new_rtx)
9966 	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9967 				  INTVAL (new_rtx), result_mode, &complement_p))
9968 	    {
9969 	      varop = XEXP (varop, 0);
9970 	      continue;
9971 	    }
9972 
9973 	  break;
9974 
9975 	case MINUS:
9976 	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9977 	     with C the size of VAROP - 1 and the shift is logical if
9978 	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9979 	     we have a (gt X 0) operation.  If the shift is arithmetic with
9980 	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9981 	     we have a (neg (gt X 0)) operation.  */
9982 
9983 	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9984 	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9985 	      && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9986 	      && (code == LSHIFTRT || code == ASHIFTRT)
9987 	      && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
9988 	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9989 	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9990 	    {
9991 	      count = 0;
9992 	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9993 				  const0_rtx);
9994 
9995 	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9996 		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9997 
9998 	      continue;
9999 	    }
10000 	  break;
10001 
10002 	case TRUNCATE:
10003 	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
10004 	     if the truncate does not affect the value.  */
10005 	  if (code == LSHIFTRT
10006 	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
10007 	      && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
10008 	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
10009 		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
10010 		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
10011 	    {
10012 	      rtx varop_inner = XEXP (varop, 0);
10013 
10014 	      varop_inner
10015 		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
10016 				    XEXP (varop_inner, 0),
10017 				    GEN_INT
10018 				    (count + INTVAL (XEXP (varop_inner, 1))));
10019 	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
10020 	      count = 0;
10021 	      continue;
10022 	    }
10023 	  break;
10024 
10025 	default:
10026 	  break;
10027 	}
10028 
10029       break;
10030     }
10031 
10032   shift_mode = try_widen_shift_mode (code, varop, count, result_mode, mode,
10033 				     outer_op, outer_const);
10034 
10035   /* We have now finished analyzing the shift.  The result should be
10036      a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
10037      OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
10038      to the result of the shift.  OUTER_CONST is the relevant constant,
10039      but we must turn off all bits turned off in the shift.  */
10040 
10041   if (outer_op == UNKNOWN
10042       && orig_code == code && orig_count == count
10043       && varop == orig_varop
10044       && shift_mode == GET_MODE (varop))
10045     return NULL_RTX;
10046 
10047   /* Make a SUBREG if necessary.  If we can't make it, fail.  */
10048   varop = gen_lowpart (shift_mode, varop);
10049   if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
10050     return NULL_RTX;
10051 
10052   /* If we have an outer operation and we just made a shift, it is
10053      possible that we could have simplified the shift were it not
10054      for the outer operation.  So try to do the simplification
10055      recursively.  */
10056 
10057   if (outer_op != UNKNOWN)
10058     x = simplify_shift_const_1 (code, shift_mode, varop, count);
10059   else
10060     x = NULL_RTX;
10061 
10062   if (x == NULL_RTX)
10063     x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));
10064 
10065   /* If we were doing an LSHIFTRT in a wider mode than it was originally,
10066      turn off all the bits that the shift would have turned off.  */
10067   if (orig_code == LSHIFTRT && result_mode != shift_mode)
10068     x = simplify_and_const_int (NULL_RTX, shift_mode, x,
10069 				GET_MODE_MASK (result_mode) >> orig_count);
10070 
10071   /* Do the remainder of the processing in RESULT_MODE.  */
10072   x = gen_lowpart_or_truncate (result_mode, x);
10073 
10074   /* If COMPLEMENT_P is set, we have to complement X before doing the outer
10075      operation.  */
10076   if (complement_p)
10077     x = simplify_gen_unary (NOT, result_mode, x, result_mode);
10078 
10079   if (outer_op != UNKNOWN)
10080     {
10081       if (GET_RTX_CLASS (outer_op) != RTX_UNARY
10082 	  && GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
10083 	outer_const = trunc_int_for_mode (outer_const, result_mode);
10084 
10085       if (outer_op == AND)
10086 	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
10087       else if (outer_op == SET)
10088 	{
10089 	  /* This means that we have determined that the result is
10090 	     equivalent to a constant.  This should be rare.  */
10091 	  if (!side_effects_p (x))
10092 	    x = GEN_INT (outer_const);
10093 	}
10094       else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
10095 	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
10096       else
10097 	x = simplify_gen_binary (outer_op, result_mode, x,
10098 				 GEN_INT (outer_const));
10099     }
10100 
10101   return x;
10102 }
10103 
10104 /* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
10105    The result of the shift is RESULT_MODE.  If we cannot simplify it,
10106    return X or, if it is NULL, synthesize the expression with
10107    simplify_gen_binary.  Otherwise, return a simplified value.
10108 
10109    The shift is normally computed in the widest mode we find in VAROP, as
10110    long as it isn't a different number of words than RESULT_MODE.  Exceptions
10111    are ASHIFTRT and ROTATE, which are always done in their original mode.  */
10112 
10113 static rtx
10114 simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
10115 		      rtx varop, int count)
10116 {
10117   rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
10118   if (tem)
10119     return tem;
10120 
10121   if (!x)
10122     x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
10123   if (GET_MODE (x) != result_mode)
10124     x = gen_lowpart (result_mode, x);
10125   return x;
10126 }
10127 
10128 
10129 /* Like recog, but we receive the address of a pointer to a new pattern.
10130    We try to match the rtx that the pointer points to.
10131    If that fails, we may try to modify or replace the pattern,
10132    storing the replacement into the same pointer object.
10133 
10134    Modifications include deletion or addition of CLOBBERs.
10135 
10136    PNOTES is a pointer to a location where any REG_UNUSED notes added for
10137    the CLOBBERs are placed.
10138 
10139    The value is the final insn code from the pattern ultimately matched,
10140    or -1.  */
10141 
10142 static int
10143 recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
10144 {
10145   rtx pat = *pnewpat;
10146   int insn_code_number;
10147   int num_clobbers_to_add = 0;
10148   int i;
10149   rtx notes = 0;
10150   rtx old_notes, old_pat;
10151 
10152   /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
10153      we use to indicate that something didn't match.  If we find such a
10154      thing, force rejection.  */
10155   if (GET_CODE (pat) == PARALLEL)
10156     for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
10157       if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
10158 	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
10159 	return -1;
10160 
10161   old_pat = PATTERN (insn);
10162   old_notes = REG_NOTES (insn);
10163   PATTERN (insn) = pat;
10164   REG_NOTES (insn) = 0;
10165 
10166   insn_code_number = recog (pat, insn, &num_clobbers_to_add);
10167   if (dump_file && (dump_flags & TDF_DETAILS))
10168     {
10169       if (insn_code_number < 0)
10170 	fputs ("Failed to match this instruction:\n", dump_file);
10171       else
10172 	fputs ("Successfully matched this instruction:\n", dump_file);
10173       print_rtl_single (dump_file, pat);
10174     }
10175 
10176   /* If the pattern wasn't recognized, there is the possibility that we
10177      previously had an insn that clobbered some register as a side effect,
10178      but the combined insn doesn't need to do that.  So try once more
10179      without the clobbers unless this represents an ASM insn.  */
10180 
10181   if (insn_code_number < 0 && ! check_asm_operands (pat)
10182       && GET_CODE (pat) == PARALLEL)
10183     {
10184       int pos;
10185 
10186       for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
10187 	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
10188 	  {
10189 	    if (i != pos)
10190 	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
10191 	    pos++;
10192 	  }
10193 
10194       SUBST_INT (XVECLEN (pat, 0), pos);
10195 
10196       if (pos == 1)
10197 	pat = XVECEXP (pat, 0, 0);
10198 
10199       PATTERN (insn) = pat;
10200       insn_code_number = recog (pat, insn, &num_clobbers_to_add);
10201       if (dump_file && (dump_flags & TDF_DETAILS))
10202 	{
10203 	  if (insn_code_number < 0)
10204 	    fputs ("Failed to match this instruction:\n", dump_file);
10205 	  else
10206 	    fputs ("Successfully matched this instruction:\n", dump_file);
10207 	  print_rtl_single (dump_file, pat);
10208 	}
10209     }
10210   PATTERN (insn) = old_pat;
10211   REG_NOTES (insn) = old_notes;
10212 
10213   /* Recognize all noop sets; these will be killed by a followup pass.  */
10214   if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
10215     insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
10216 
10217   /* If we had any clobbers to add, make a new pattern that contains
10218      them.  Then check to make sure that all of them are dead.  */
10219   if (num_clobbers_to_add)
10220     {
10221       rtx newpat = gen_rtx_PARALLEL (VOIDmode,
10222 				     rtvec_alloc (GET_CODE (pat) == PARALLEL
10223 						  ? (XVECLEN (pat, 0)
10224 						     + num_clobbers_to_add)
10225 						  : num_clobbers_to_add + 1));
10226 
10227       if (GET_CODE (pat) == PARALLEL)
10228 	for (i = 0; i < XVECLEN (pat, 0); i++)
10229 	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
10230       else
10231 	XVECEXP (newpat, 0, 0) = pat;
10232 
10233       add_clobbers (newpat, insn_code_number);
10234 
10235       for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
10236 	   i < XVECLEN (newpat, 0); i++)
10237 	{
10238 	  if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
10239 	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
10240 	    return -1;
10241 	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
10242 	    {
10243 	      gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
10244 	      notes = alloc_reg_note (REG_UNUSED,
10245 				      XEXP (XVECEXP (newpat, 0, i), 0), notes);
10246 	    }
10247 	}
10248       pat = newpat;
10249     }
10250 
10251   *pnewpat = pat;
10252   *pnotes = notes;
10253 
10254   return insn_code_number;
10255 }
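
/* For instance (target details hypothetical): if the combined pattern is
   (set (reg:SI 100) (plus:SI ...)) and the matching add pattern on the
   target also clobbers a condition-code register, recog returns its insn
   code with NUM_CLOBBERS_TO_ADD == 1.  The pattern is then rebuilt as a
   PARALLEL with an added (clobber (reg:CC ...)), and the combination is
   rejected unless that register is dead at INSN.  */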
10256 
10257 /* Like gen_lowpart_general but for use by combine.  In combine it
10258    is not possible to create any new pseudoregs.  However, it is
10259    safe to create invalid memory addresses, because combine will
10260    try to recognize them and all they will do is make the combine
10261    attempt fail.
10262 
10263    If for some reason this cannot do its job, an rtx
10264    (clobber (const_int 0)) is returned.
10265    An insn containing that will not be recognized.  */
10266 
10267 static rtx
10268 gen_lowpart_for_combine (enum machine_mode omode, rtx x)
10269 {
10270   enum machine_mode imode = GET_MODE (x);
10271   unsigned int osize = GET_MODE_SIZE (omode);
10272   unsigned int isize = GET_MODE_SIZE (imode);
10273   rtx result;
10274 
10275   if (omode == imode)
10276     return x;
10277 
10278   /* Return identity if this is a CONST or symbolic reference.  */
10279   if (omode == Pmode
10280       && (GET_CODE (x) == CONST
10281 	  || GET_CODE (x) == SYMBOL_REF
10282 	  || GET_CODE (x) == LABEL_REF))
10283     return x;
10284 
10285   /* We can only support OMODE being wider than a word if X is a
10286      constant integer or has a mode the same size.  */
10287   if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
10288       && ! ((imode == VOIDmode
10289 	     && (CONST_INT_P (x)
10290 		 || GET_CODE (x) == CONST_DOUBLE))
10291 	    || isize == osize))
10292     goto fail;
10293 
10294   /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
10295      won't know what to do.  So we will strip off the SUBREG here and
10296      process normally.  */
10297   if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
10298     {
10299       x = SUBREG_REG (x);
10300 
10301       /* In case we fall through to the address adjustments further
10302 	 below, update the known mode and size of X (IMODE and ISIZE),
10303 	 since we just changed X.  */
10304       imode = GET_MODE (x);
10305 
10306       if (imode == omode)
10307 	return x;
10308 
10309       isize = GET_MODE_SIZE (imode);
10310     }
10311 
10312   result = gen_lowpart_common (omode, x);
10313 
10314   if (result)
10315     return result;
10316 
10317   if (MEM_P (x))
10318     {
10319       int offset = 0;
10320 
10321       /* Refuse to work on a volatile memory ref or one with a mode-dependent
10322 	 address.  */
10323       if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
10324 	goto fail;
10325 
10326       /* If we want to refer to something bigger than the original memref,
10327 	 generate a paradoxical subreg instead.  That will force a reload
10328 	 of the original memref X.  */
10329       if (isize < osize)
10330 	return gen_rtx_SUBREG (omode, x, 0);
10331 
10332       if (WORDS_BIG_ENDIAN)
10333 	offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
10334 
10335       /* Adjust the address so that the address-after-the-data is
10336 	 unchanged.  */
10337       if (BYTES_BIG_ENDIAN)
10338 	offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
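
      /* E.g. taking the SImode lowpart of a DImode MEM on a big-endian
	 target with 4-byte words (sizes illustrative): the
	 WORDS_BIG_ENDIAN step gives offset == 8 - 4 == 4, the
	 BYTES_BIG_ENDIAN step subtracts 4 - 4 == 0, and the low word is
	 correctly addressed 4 bytes into the memref.  */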
10339 
10340       return adjust_address_nv (x, omode, offset);
10341     }
10342 
10343   /* If X is a comparison operator, rewrite it in a new mode.  This
10344      probably won't match, but may allow further simplifications.  */
10345   else if (COMPARISON_P (x))
10346     return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
10347 
10348   /* If we couldn't simplify X any other way, just enclose it in a
10349      SUBREG.  Normally, this SUBREG won't match, but some patterns may
10350      include an explicit SUBREG or we may simplify it further in combine.  */
10351   else
10352     {
10353       int offset = 0;
10354       rtx res;
10355 
10356       offset = subreg_lowpart_offset (omode, imode);
10357       if (imode == VOIDmode)
10358 	{
10359 	  imode = int_mode_for_mode (omode);
10360 	  x = gen_lowpart_common (imode, x);
10361 	  if (x == NULL)
10362 	    goto fail;
10363 	}
10364       res = simplify_gen_subreg (omode, x, imode, offset);
10365       if (res)
10366 	return res;
10367     }
10368 
10369  fail:
10370   return gen_rtx_CLOBBER (omode, const0_rtx);
10371 }
10372 
10373 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
10374    comparison code that will be tested.
10375 
10376    The result is a possibly different comparison code to use.  *POP0 and
10377    *POP1 may be updated.
10378 
10379    It is possible that we might detect that a comparison is either always
10380    true or always false.  However, we do not perform general constant
10381    folding in combine, so this knowledge isn't useful.  Such tautologies
10382    should have been detected earlier.  Hence we ignore all such cases.  */
10383 
10384 static enum rtx_code
10385 simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
10386 {
10387   rtx op0 = *pop0;
10388   rtx op1 = *pop1;
10389   rtx tem, tem1;
10390   int i;
10391   enum machine_mode mode, tmode;
10392 
10393   /* Try a few ways of applying the same transformation to both operands.  */
10394   while (1)
10395     {
10396 #ifndef WORD_REGISTER_OPERATIONS
10397       /* The test below this one won't handle SIGN_EXTENDs on these machines,
10398 	 so check specially.  */
10399       if (code != GTU && code != GEU && code != LTU && code != LEU
10400 	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10401 	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
10402 	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
10403 	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10404 	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10405 	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10406 	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10407 	  && CONST_INT_P (XEXP (op0, 1))
10408 	  && XEXP (op0, 1) == XEXP (op1, 1)
10409 	  && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10410 	  && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
10411 	  && (INTVAL (XEXP (op0, 1))
10412 	      == (GET_MODE_BITSIZE (GET_MODE (op0))
10413 		  - (GET_MODE_BITSIZE
10414 		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10415 	{
10416 	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10417 	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10418 	}
10419 #endif
10420 
10421       /* If both operands are the same constant shift, see if we can ignore the
10422 	 shift.  We can if the shift is a rotate or if the bits shifted out of
10423 	 this shift are known to be zero for both inputs and if the type of
10424 	 comparison is compatible with the shift.  */
10425       if (GET_CODE (op0) == GET_CODE (op1)
10426 	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10427 	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10428 	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10429 		  && (code != GT && code != LT && code != GE && code != LE))
10430 	      || (GET_CODE (op0) == ASHIFTRT
10431 		  && (code != GTU && code != LTU
10432 		      && code != GEU && code != LEU)))
10433 	  && CONST_INT_P (XEXP (op0, 1))
10434 	  && INTVAL (XEXP (op0, 1)) >= 0
10435 	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10436 	  && XEXP (op0, 1) == XEXP (op1, 1))
10437 	{
10438 	  enum machine_mode mode = GET_MODE (op0);
10439 	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10440 	  int shift_count = INTVAL (XEXP (op0, 1));
10441 
10442 	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10443 	    mask &= (mask >> shift_count) << shift_count;
10444 	  else if (GET_CODE (op0) == ASHIFT)
10445 	    mask = (mask & (mask << shift_count)) >> shift_count;
10446 
10447 	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10448 	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10449 	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10450 	  else
10451 	    break;
10452 	}
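
      /* For instance, if op0 is (lshiftrt:QI A 2) and op1 is
	 (lshiftrt:QI B 2), MASK starts as 0xff and becomes
	 (0xff >> 2) << 2 = 0xfc; if the nonzero bits of both A and B lie
	 within 0xfc, the bits shifted out are known to be zero and the
	 shifts can be dropped, comparing A with B directly.  */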
10453 
10454       /* If both operands are AND's of a paradoxical SUBREG by constant, the
10455 	 SUBREGs are of the same mode, and, in both cases, the AND would
10456 	 be redundant if the comparison was done in the narrower mode,
10457 	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10458 	 and the operand's possibly nonzero bits are 0xffffff01; in that case
10459 	 if we only care about QImode, we don't need the AND).  This case
10460 	 occurs if the output mode of an scc insn is not SImode and
10461 	 STORE_FLAG_VALUE == 1 (e.g., the 386).
10462 
10463 	 Similarly, check for a case where the AND's are ZERO_EXTEND
10464 	 operations from some narrower mode even though a SUBREG is not
10465 	 present.  */
10466 
10467       else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10468 	       && CONST_INT_P (XEXP (op0, 1))
10469 	       && CONST_INT_P (XEXP (op1, 1)))
10470 	{
10471 	  rtx inner_op0 = XEXP (op0, 0);
10472 	  rtx inner_op1 = XEXP (op1, 0);
10473 	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10474 	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10475 	  int changed = 0;
10476 
10477 	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10478 	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
10479 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10480 	      && (GET_MODE (SUBREG_REG (inner_op0))
10481 		  == GET_MODE (SUBREG_REG (inner_op1)))
10482 	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10483 		  <= HOST_BITS_PER_WIDE_INT)
10484 	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10485 					     GET_MODE (SUBREG_REG (inner_op0)))))
10486 	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10487 					     GET_MODE (SUBREG_REG (inner_op1))))))
10488 	    {
10489 	      op0 = SUBREG_REG (inner_op0);
10490 	      op1 = SUBREG_REG (inner_op1);
10491 
10492 	      /* The resulting comparison is always unsigned since we masked
10493 		 off the original sign bit.  */
10494 	      code = unsigned_condition (code);
10495 
10496 	      changed = 1;
10497 	    }
10498 
10499 	  else if (c0 == c1)
10500 	    for (tmode = GET_CLASS_NARROWEST_MODE
10501 		 (GET_MODE_CLASS (GET_MODE (op0)));
10502 		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10503 	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10504 		{
10505 		  op0 = gen_lowpart (tmode, inner_op0);
10506 		  op1 = gen_lowpart (tmode, inner_op1);
10507 		  code = unsigned_condition (code);
10508 		  changed = 1;
10509 		  break;
10510 		}
10511 
10512 	  if (! changed)
10513 	    break;
10514 	}
10515 
10516       /* If both operands are NOT, we can strip off the outer operation
10517 	 and adjust the comparison code for swapped operands; similarly for
10518 	 NEG, except that this must be an equality comparison.  */
10519       else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10520 	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10521 		   && (code == EQ || code == NE)))
10522 	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
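	/* E.g. (ltu (not A) (not B)) becomes (gtu A B), since NOT reverses
	   the unsigned order of its operand.  */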
10523 
10524       else
10525 	break;
10526     }
10527 
10528   /* If the first operand is a constant, swap the operands and adjust the
10529      comparison code appropriately, but don't do this if the second operand
10530      is already a constant integer.  */
10531   if (swap_commutative_operands_p (op0, op1))
10532     {
10533       tem = op0, op0 = op1, op1 = tem;
10534       code = swap_condition (code);
10535     }
10536 
10537   /* We now enter a loop during which we will try to simplify the comparison.
10538      For the most part, we are only concerned with comparisons with zero,
10539      but some things may really be comparisons with zero but not start
10540      out looking that way.  */
10541 
10542   while (CONST_INT_P (op1))
10543     {
10544       enum machine_mode mode = GET_MODE (op0);
10545       unsigned int mode_width = GET_MODE_BITSIZE (mode);
10546       unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10547       int equality_comparison_p;
10548       int sign_bit_comparison_p;
10549       int unsigned_comparison_p;
10550       HOST_WIDE_INT const_op;
10551 
10552       /* We only want to handle integral modes.  This catches VOIDmode,
10553 	 CCmode, and the floating-point modes.  An exception is that we
10554 	 can handle VOIDmode if OP0 is a COMPARE or a comparison
10555 	 operation.  */
10556 
10557       if (GET_MODE_CLASS (mode) != MODE_INT
10558 	  && ! (mode == VOIDmode
10559 		&& (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
10560 	break;
10561 
10562       /* Get the constant we are comparing against and turn off all bits
10563 	 not on in our mode.  */
10564       const_op = INTVAL (op1);
10565       if (mode != VOIDmode)
10566 	const_op = trunc_int_for_mode (const_op, mode);
10567       op1 = GEN_INT (const_op);
10568 
10569       /* If we are comparing against a constant power of two and the value
10570 	 being compared can only have that single bit nonzero (e.g., it was
10571 	 `and'ed with that bit), we can replace this with a comparison
10572 	 with zero.  */
10573       if (const_op
10574 	  && (code == EQ || code == NE || code == GE || code == GEU
10575 	      || code == LT || code == LTU)
10576 	  && mode_width <= HOST_BITS_PER_WIDE_INT
10577 	  && exact_log2 (const_op) >= 0
10578 	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10579 	{
10580 	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10581 	  op1 = const0_rtx, const_op = 0;
10582 	}
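
      /* For example, if op0 is (and:SI X (const_int 4)), so that bit 2 is
	 its only possibly nonzero bit, then (eq op0 (const_int 4)) simply
	 asks whether that bit is set and becomes (ne op0 (const_int 0)).  */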
10583 
10584       /* Similarly, if we are comparing a value known to be either -1 or
10585 	 0 with -1, change it to the opposite comparison against zero.  */
10586 
10587       if (const_op == -1
10588 	  && (code == EQ || code == NE || code == GT || code == LE
10589 	      || code == GEU || code == LTU)
10590 	  && num_sign_bit_copies (op0, mode) == mode_width)
10591 	{
10592 	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10593 	  op1 = const0_rtx, const_op = 0;
10594 	}
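
      /* For example, if X is known to be either 0 or -1 (every bit a copy
	 of the sign bit), then (eq X (const_int -1)) becomes
	 (ne X (const_int 0)), since X is -1 exactly when it is not 0.  */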
10595 
10596       /* Do some canonicalizations based on the comparison code.  We prefer
10597 	 comparisons against zero and then prefer equality comparisons.
10598 	 If we can reduce the size of a constant, we will do that too.  */
10599 
10600       switch (code)
10601 	{
10602 	case LT:
10603 	  /* < C is equivalent to <= (C - 1).  */
10604 	  if (const_op > 0)
10605 	    {
10606 	      const_op -= 1;
10607 	      op1 = GEN_INT (const_op);
10608 	      code = LE;
10609 	      /* ... fall through to LE case below.  */
10610 	    }
10611 	  else
10612 	    break;
10613 
10614 	case LE:
10615 	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
10616 	  if (const_op < 0)
10617 	    {
10618 	      const_op += 1;
10619 	      op1 = GEN_INT (const_op);
10620 	      code = LT;
10621 	    }
10622 
10623 	  /* If we are doing a <= 0 comparison on a value known to have
10624 	     a zero sign bit, we can replace this with == 0.  */
10625 	  else if (const_op == 0
10626 		   && mode_width <= HOST_BITS_PER_WIDE_INT
10627 		   && (nonzero_bits (op0, mode)
10628 		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10629 	    code = EQ;
10630 	  break;
10631 
10632 	case GE:
10633 	  /* >= C is equivalent to > (C - 1).  */
10634 	  if (const_op > 0)
10635 	    {
10636 	      const_op -= 1;
10637 	      op1 = GEN_INT (const_op);
10638 	      code = GT;
10639 	      /* ... fall through to GT below.  */
10640 	    }
10641 	  else
10642 	    break;
10643 
10644 	case GT:
10645 	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
10646 	  if (const_op < 0)
10647 	    {
10648 	      const_op += 1;
10649 	      op1 = GEN_INT (const_op);
10650 	      code = GE;
10651 	    }
10652 
10653 	  /* If we are doing a > 0 comparison on a value known to have
10654 	     a zero sign bit, we can replace this with != 0.  */
10655 	  else if (const_op == 0
10656 		   && mode_width <= HOST_BITS_PER_WIDE_INT
10657 		   && (nonzero_bits (op0, mode)
10658 		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10659 	    code = NE;
10660 	  break;
10661 
10662 	case LTU:
10663 	  /* < C is equivalent to <= (C - 1).  */
10664 	  if (const_op > 0)
10665 	    {
10666 	      const_op -= 1;
10667 	      op1 = GEN_INT (const_op);
10668 	      code = LEU;
10669 	      /* ... fall through ...  */
10670 	    }
10671 
10672 	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
10673 	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10674 		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10675 	    {
10676 	      const_op = 0, op1 = const0_rtx;
10677 	      code = GE;
10678 	      break;
10679 	    }
10680 	  else
10681 	    break;
10682 
10683 	case LEU:
10684 	  /* unsigned <= 0 is equivalent to == 0 */
10685 	  if (const_op == 0)
10686 	    code = EQ;
10687 
10688 	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
10689 	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10690 		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10691 	    {
10692 	      const_op = 0, op1 = const0_rtx;
10693 	      code = GE;
10694 	    }
10695 	  break;
10696 
10697 	case GEU:
10698 	  /* >= C is equivalent to > (C - 1).  */
10699 	  if (const_op > 1)
10700 	    {
10701 	      const_op -= 1;
10702 	      op1 = GEN_INT (const_op);
10703 	      code = GTU;
10704 	      /* ... fall through ...  */
10705 	    }
10706 
10707 	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
10708 	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10709 		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10710 	    {
10711 	      const_op = 0, op1 = const0_rtx;
10712 	      code = LT;
10713 	      break;
10714 	    }
10715 	  else
10716 	    break;
10717 
10718 	case GTU:
10719 	  /* unsigned > 0 is equivalent to != 0 */
10720 	  if (const_op == 0)
10721 	    code = NE;
10722 
10723 	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
10724 	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10725 		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10726 	    {
10727 	      const_op = 0, op1 = const0_rtx;
10728 	      code = LT;
10729 	    }
10730 	  break;
10731 
10732 	default:
10733 	  break;
10734 	}
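
      /* These rewrites can chain through the fall-throughs above; e.g.
	 (ltu X (const_int 1)) first becomes (leu X (const_int 0)) and
	 then (eq X (const_int 0)).  */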
10735 
10736       /* Compute some predicates to simplify code below.  */
10737 
10738       equality_comparison_p = (code == EQ || code == NE);
10739       sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10740       unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10741 			       || code == GEU);
10742 
10743       /* If this is a sign bit comparison and we can do arithmetic in
10744 	 MODE, say that we will only be needing the sign bit of OP0.  */
10745       if (sign_bit_comparison_p
10746 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10747 	op0 = force_to_mode (op0, mode,
10748 			     ((HOST_WIDE_INT) 1
10749 			      << (GET_MODE_BITSIZE (mode) - 1)),
10750 			     0);
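
      /* For example, for (lt:SI X (const_int 0)) only bit 31 of X matters,
	 so force_to_mode can simplify away parts of X that cannot affect
	 the sign bit.  */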
10751 
10752       /* Now try cases based on the opcode of OP0.  If none of the cases
10753 	 does a "continue", we exit this loop immediately after the
10754 	 switch.  */
10755 
10756       switch (GET_CODE (op0))
10757 	{
10758 	case ZERO_EXTRACT:
10759 	  /* If we are extracting a single bit from a variable position in
10760 	     a constant that has only a single bit set and are comparing it
10761 	     with zero, we can convert this into an equality comparison
10762 	     between the position and the location of the single bit.  */
10763 	  /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10764 	     have already reduced the shift count modulo the word size.  */
10765 	  if (!SHIFT_COUNT_TRUNCATED
10766 	      && CONST_INT_P (XEXP (op0, 0))
10767 	      && XEXP (op0, 1) == const1_rtx
10768 	      && equality_comparison_p && const_op == 0
10769 	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10770 	    {
10771 	      if (BITS_BIG_ENDIAN)
10772 		{
10773 		  enum machine_mode new_mode
10774 		    = mode_for_extraction (EP_extzv, 1);
10775 		  if (new_mode == MAX_MACHINE_MODE)
10776 		    i = BITS_PER_WORD - 1 - i;
10777 		  else
10778 		    {
10779 		      mode = new_mode;
10780 		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
10781 		    }
10782 		}
10783 
10784 	      op0 = XEXP (op0, 2);
10785 	      op1 = GEN_INT (i);
10786 	      const_op = i;
10787 
10788 	      /* Result is nonzero iff shift count is equal to I.  */
10789 	      code = reverse_condition (code);
10790 	      continue;
10791 	    }
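
	  /* For example, with BITS_BIG_ENDIAN clear,
	     (eq (zero_extract (const_int 4) (const_int 1) POS) (const_int 0))
	     extracts bit POS of the constant 4; that bit is set only for
	     POS == 2, so the test becomes (ne POS (const_int 2)).  */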
10792 
10793 	  /* ... fall through ...  */
10794 
10795 	case SIGN_EXTRACT:
10796 	  tem = expand_compound_operation (op0);
10797 	  if (tem != op0)
10798 	    {
10799 	      op0 = tem;
10800 	      continue;
10801 	    }
10802 	  break;
10803 
10804 	case NOT:
10805 	  /* If testing for equality, we can take the NOT of the constant.  */
10806 	  if (equality_comparison_p
10807 	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10808 	    {
10809 	      op0 = XEXP (op0, 0);
10810 	      op1 = tem;
10811 	      continue;
10812 	    }
10813 
10814 	  /* If just looking at the sign bit, reverse the sense of the
10815 	     comparison.  */
10816 	  if (sign_bit_comparison_p)
10817 	    {
10818 	      op0 = XEXP (op0, 0);
10819 	      code = (code == GE ? LT : GE);
10820 	      continue;
10821 	    }
10822 	  break;
10823 
10824 	case NEG:
10825 	  /* If testing for equality, we can take the NEG of the constant.  */
10826 	  if (equality_comparison_p
10827 	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10828 	    {
10829 	      op0 = XEXP (op0, 0);
10830 	      op1 = tem;
10831 	      continue;
10832 	    }
10833 
10834 	  /* The remaining cases only apply to comparisons with zero.  */
10835 	  if (const_op != 0)
10836 	    break;
10837 
10838 	  /* When X is ABS or is known positive,
10839 	     (neg X) is < 0 if and only if X != 0.  */
10840 
10841 	  if (sign_bit_comparison_p
10842 	      && (GET_CODE (XEXP (op0, 0)) == ABS
10843 		  || (mode_width <= HOST_BITS_PER_WIDE_INT
10844 		      && (nonzero_bits (XEXP (op0, 0), mode)
10845 			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10846 	    {
10847 	      op0 = XEXP (op0, 0);
10848 	      code = (code == LT ? NE : EQ);
10849 	      continue;
10850 	    }
10851 
10852 	  /* If we have NEG of something whose two high-order bits are the
10853 	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
10854 	  if (num_sign_bit_copies (op0, mode) >= 2)
10855 	    {
10856 	      op0 = XEXP (op0, 0);
10857 	      code = swap_condition (code);
10858 	      continue;
10859 	    }
10860 	  break;
10861 
10862 	case ROTATE:
10863 	  /* If we are testing equality and our count is a constant, we
10864 	     can perform the inverse operation on our RHS.  */
10865 	  if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
10866 	      && (tem = simplify_binary_operation (ROTATERT, mode,
10867 						   op1, XEXP (op0, 1))) != 0)
10868 	    {
10869 	      op0 = XEXP (op0, 0);
10870 	      op1 = tem;
10871 	      continue;
10872 	    }
10873 
10874 	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10875 	     a particular bit.  Convert it to an AND of a constant of that
10876 	     bit.  This will be converted into a ZERO_EXTRACT.  */
10877 	  if (const_op == 0 && sign_bit_comparison_p
10878 	      && CONST_INT_P (XEXP (op0, 1))
10879 	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10880 	    {
10881 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10882 					    ((HOST_WIDE_INT) 1
10883 					     << (mode_width - 1
10884 						 - INTVAL (XEXP (op0, 1)))));
10885 	      code = (code == LT ? NE : EQ);
10886 	      continue;
10887 	    }
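
	  /* For instance, (lt:SI (rotate:SI X (const_int 3)) (const_int 0))
	     tests bit 31 - 3 = 28 of X and becomes
	     (ne (and:SI X (const_int 0x10000000)) (const_int 0)).  */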
10888 
10889 	  /* Fall through.  */
10890 
10891 	case ABS:
10892 	  /* ABS is ignorable inside an equality comparison with zero.  */
10893 	  if (const_op == 0 && equality_comparison_p)
10894 	    {
10895 	      op0 = XEXP (op0, 0);
10896 	      continue;
10897 	    }
10898 	  break;
10899 
10900 	case SIGN_EXTEND:
10901 	  /* Can simplify (compare (zero/sign_extend FOO) CONST) to
10902 	     (compare FOO CONST) if CONST fits in FOO's mode and we
10903 	     are either testing inequality or have an unsigned
10904 	     comparison with ZERO_EXTEND or a signed comparison with
10905 	     SIGN_EXTEND.  But don't do it if we don't have a compare
10906 	     insn of the given mode, since we'd have to revert it
10907 	     later on, and then we wouldn't know whether to sign- or
10908 	     zero-extend.  */
10909 	  mode = GET_MODE (XEXP (op0, 0));
10910 	  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10911 	      && ! unsigned_comparison_p
10912 	      && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10913 	      && ((unsigned HOST_WIDE_INT) const_op
10914 		  < (((unsigned HOST_WIDE_INT) 1
10915 		      << (GET_MODE_BITSIZE (mode) - 1))))
10916 	      && have_insn_for (COMPARE, mode))
10917 	    {
10918 	      op0 = XEXP (op0, 0);
10919 	      continue;
10920 	    }
10921 	  break;
10922 
10923 	case SUBREG:
10924 	  /* Check for the case where we are comparing A - C1 with C2, that is
10925 
10926 	       (subreg:MODE (plus (A) (-C1))) op (C2)
10927 
10928 	     with C1 a constant, and try to lift the SUBREG, i.e. to do the
10929 	     comparison in the wider mode.  One of the following two conditions
10930 	     must be true in order for this to be valid:
10931 
10932 	       1. The mode extension results in the same bit pattern being added
10933 		  on both sides and the comparison is equality or unsigned.  As
10934 		  C2 has been truncated to fit in MODE, the pattern can only be
10935 		  all 0s or all 1s.
10936 
10937 	       2. The mode extension results in the sign bit being copied on
10938 		  each side.
10939 
10940 	     The difficulty here is that we have predicates for A but not for
10941 	     (A - C1) so we need to check that C1 is within proper bounds so
10942 	     as to perturb A as little as possible.  */
10943 
10944 	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10945 	      && subreg_lowpart_p (op0)
10946 	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
10947 	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10948 	      && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
10949 	    {
10950 	      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10951 	      rtx a = XEXP (SUBREG_REG (op0), 0);
10952 	      HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10953 
10954 	      if ((c1 > 0
10955 		   && (unsigned HOST_WIDE_INT) c1
10956 		       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10957 		   && (equality_comparison_p || unsigned_comparison_p)
10958 		   /* (A - C1) zero-extends if it is positive and sign-extends
10959 		      if it is negative, C2 both zero- and sign-extends.  */
10960 		   && ((0 == (nonzero_bits (a, inner_mode)
10961 			      & ~GET_MODE_MASK (mode))
10962 			&& const_op >= 0)
10963 		       /* (A - C1) sign-extends if it is positive and 1-extends
10964 			  if it is negative, C2 both sign- and 1-extends.  */
10965 		       || (num_sign_bit_copies (a, inner_mode)
10966 			   > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10967 					     - mode_width)
10968 			   && const_op < 0)))
10969 		  || ((unsigned HOST_WIDE_INT) c1
10970 		       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10971 		      /* (A - C1) always sign-extends, like C2.  */
10972 		      && num_sign_bit_copies (a, inner_mode)
10973 			 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10974 					   - (mode_width - 1))))
10975 		{
10976 		  op0 = SUBREG_REG (op0);
10977 		  continue;
10978 		}
10979 	    }
10980 
10981 	  /* If the inner mode is narrower and we are extracting the low part,
10982 	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10983 	  if (subreg_lowpart_p (op0)
10984 	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10985 	    /* Fall through */ ;
10986 	  else
10987 	    break;
10988 
10989 	  /* ... fall through ...  */
10990 
10991 	case ZERO_EXTEND:
10992 	  mode = GET_MODE (XEXP (op0, 0));
10993 	  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10994 	      && (unsigned_comparison_p || equality_comparison_p)
10995 	      && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10996 	      && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
10997 	      && have_insn_for (COMPARE, mode))
10998 	    {
10999 	      op0 = XEXP (op0, 0);
11000 	      continue;
11001 	    }
11002 	  break;
11003 
11004 	case PLUS:
11005 	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
11006 	     this for equality comparisons due to pathological cases involving
11007 	     overflows.  */
11008 	  if (equality_comparison_p
11009 	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
11010 							op1, XEXP (op0, 1))))
11011 	    {
11012 	      op0 = XEXP (op0, 0);
11013 	      op1 = tem;
11014 	      continue;
11015 	    }
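
	  /* For example, (eq (plus X (const_int 5)) (const_int 7))
	     becomes (eq X (const_int 2)).  */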
11016 
11017 	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
11018 	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
11019 	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
11020 	    {
11021 	      op0 = XEXP (XEXP (op0, 0), 0);
11022 	      code = (code == LT ? EQ : NE);
11023 	      continue;
11024 	    }
11025 	  break;
11026 
11027 	case MINUS:
11028 	  /* We used to optimize signed comparisons against zero, but that
11029 	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
11030 	     arrive here as equality comparisons, or (GEU, LTU) are
11031 	     optimized away.  No need to special-case them.  */
11032 
11033 	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
11034 	     (eq B (minus A C)), whichever simplifies.  We can only do
11035 	     this for equality comparisons due to pathological cases involving
11036 	     overflows.  */
11037 	  if (equality_comparison_p
11038 	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
11039 							XEXP (op0, 1), op1)))
11040 	    {
11041 	      op0 = XEXP (op0, 0);
11042 	      op1 = tem;
11043 	      continue;
11044 	    }
11045 
11046 	  if (equality_comparison_p
11047 	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
11048 							XEXP (op0, 0), op1)))
11049 	    {
11050 	      op0 = XEXP (op0, 1);
11051 	      op1 = tem;
11052 	      continue;
11053 	    }
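
	  /* Together these turn e.g. (eq (minus A B) (const_int 0)) into
	     (eq A B), since (plus B (const_int 0)) simplifies to B.  */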
11054 
11055 	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
11056 	     of bits in X minus 1, is one iff X > 0.  */
11057 	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
11058 	      && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11059 	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
11060 		 == mode_width - 1
11061 	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11062 	    {
11063 	      op0 = XEXP (op0, 1);
11064 	      code = (code == GE ? LE : GT);
11065 	      continue;
11066 	    }
11067 	  break;
11068 
11069 	case XOR:
11070 	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
11071 	     if C is zero or B is a constant.  */
11072 	  if (equality_comparison_p
11073 	      && 0 != (tem = simplify_binary_operation (XOR, mode,
11074 							XEXP (op0, 1), op1)))
11075 	    {
11076 	      op0 = XEXP (op0, 0);
11077 	      op1 = tem;
11078 	      continue;
11079 	    }
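
	  /* For example, (eq (xor A (const_int 5)) (const_int 3)) becomes
	     (eq A (const_int 6)), since 5 ^ 3 == 6.  */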
11080 	  break;
11081 
11082 	case EQ:  case NE:
11083 	case UNEQ:  case LTGT:
11084 	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
11085 	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
11086 	case UNORDERED: case ORDERED:
11087 	  /* We can't do anything if OP0 is a condition code value, rather
11088 	     than an actual data value.  */
11089 	  if (const_op != 0
11090 	      || CC0_P (XEXP (op0, 0))
11091 	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
11092 	    break;
11093 
11094 	  /* Get the two operands being compared.  */
11095 	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
11096 	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
11097 	  else
11098 	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
11099 
11100 	  /* Check for the cases where we simply want the result of the
11101 	     earlier test or the opposite of that result.  */
11102 	  if (code == NE || code == EQ
11103 	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
11104 		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11105 		  && (STORE_FLAG_VALUE
11106 		      & (((HOST_WIDE_INT) 1
11107 			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
11108 		  && (code == LT || code == GE)))
11109 	    {
11110 	      enum rtx_code new_code;
11111 	      if (code == LT || code == NE)
11112 		new_code = GET_CODE (op0);
11113 	      else
11114 		new_code = reversed_comparison_code (op0, NULL);
11115 
11116 	      if (new_code != UNKNOWN)
11117 		{
11118 		  code = new_code;
11119 		  op0 = tem;
11120 		  op1 = tem1;
11121 		  continue;
11122 		}
11123 	    }
11124 	  break;
11125 
11126 	case IOR:
11127 	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
11128 	     iff X <= 0.  */
11129 	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
11130 	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
11131 	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11132 	    {
11133 	      op0 = XEXP (op0, 1);
11134 	      code = (code == GE ? GT : LE);
11135 	      continue;
11136 	    }
11137 	  break;
11138 
11139 	case AND:
11140 	  /* Convert (and (ashift 1 X) Y) to (and (lshiftrt Y X) 1).  This
11141 	     will be converted to a ZERO_EXTRACT later.  */
11142 	  if (const_op == 0 && equality_comparison_p
11143 	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
11144 	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
11145 	    {
11146 	      op0 = simplify_and_const_int
11147 		(NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
11148 						   XEXP (op0, 1),
11149 						   XEXP (XEXP (op0, 0), 1)),
11150 		 (HOST_WIDE_INT) 1);
11151 	      continue;
11152 	    }
11153 
11154 	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
11155 	     zero and X is a comparison and C1 and C2 describe only bits set
11156 	     in STORE_FLAG_VALUE, we can compare with X.  */
11157 	  if (const_op == 0 && equality_comparison_p
11158 	      && mode_width <= HOST_BITS_PER_WIDE_INT
11159 	      && CONST_INT_P (XEXP (op0, 1))
11160 	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
11161 	      && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11162 	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
11163 	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
11164 	    {
11165 	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11166 		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
11167 	      if ((~STORE_FLAG_VALUE & mask) == 0
11168 		  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
11169 		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
11170 			  && COMPARISON_P (tem))))
11171 		{
11172 		  op0 = XEXP (XEXP (op0, 0), 0);
11173 		  continue;
11174 		}
11175 	    }
11176 
11177 	  /* If we are doing an equality comparison of an AND of a bit equal
11178 	     to the sign bit, replace this with a LT or GE comparison of
11179 	     the underlying value.  */
11180 	  if (equality_comparison_p
11181 	      && const_op == 0
11182 	      && CONST_INT_P (XEXP (op0, 1))
11183 	      && mode_width <= HOST_BITS_PER_WIDE_INT
11184 	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11185 		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
11186 	    {
11187 	      op0 = XEXP (op0, 0);
11188 	      code = (code == EQ ? GE : LT);
11189 	      continue;
11190 	    }
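
	  /* For example, in SImode (eq (and X (const_int 0x80000000))
	     (const_int 0)) asks whether the sign bit of X is clear and
	     becomes (ge X (const_int 0)).  */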
11191 
11192 	  /* If this AND operation is really a ZERO_EXTEND from a narrower
11193 	     mode, the constant fits within that mode, and this is either an
11194 	     equality or unsigned comparison, try to do this comparison in
11195 	     the narrower mode.
11196 
11197 	     Note that in:
11198 
11199 	     (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
11200 	     -> (ne:DI (reg:SI 4) (const_int 0))
11201 
11202 	     the transformation is invalid unless TRULY_NOOP_TRUNCATION
11203 	     allows it or the register is known to hold a value of the
11204 	     required mode.  */
11205 	  if ((equality_comparison_p || unsigned_comparison_p)
11206 	      && CONST_INT_P (XEXP (op0, 1))
11207 	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
11208 				   & GET_MODE_MASK (mode))
11209 				  + 1)) >= 0
11210 	      && const_op >> i == 0
11211 	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
11212 	      && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
11213 					 GET_MODE_BITSIZE (GET_MODE (op0)))
11214 		  || (REG_P (XEXP (op0, 0))
11215 		      && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
11216 	    {
11217 	      op0 = gen_lowpart (tmode, XEXP (op0, 0));
11218 	      continue;
11219 	    }
11220 
11221 	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
11222 	     fits in both M1 and M2 and the SUBREG is either paradoxical
11223 	     or represents the low part, permute the SUBREG and the AND
11224 	     and try again.  */
11225 	  if (GET_CODE (XEXP (op0, 0)) == SUBREG)
11226 	    {
11227 	      unsigned HOST_WIDE_INT c1;
11228 	      tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
11229 	      /* Require an integral mode, to avoid creating something like
11230 		 (AND:SF ...).  */
11231 	      if (SCALAR_INT_MODE_P (tmode)
11232 		  /* It is unsafe to commute the AND into the SUBREG if the
11233 		     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
11234 		     not defined.  As originally written the upper bits
11235 		     have a defined value due to the AND operation.
11236 		     However, if we commute the AND inside the SUBREG then
11237 		     they no longer have defined values and the meaning of
11238 		     the code has been changed.  */
11239 		  && (0
11240 #ifdef WORD_REGISTER_OPERATIONS
11241 		      || (mode_width > GET_MODE_BITSIZE (tmode)
11242 			  && mode_width <= BITS_PER_WORD)
11243 #endif
11244 		      || (mode_width <= GET_MODE_BITSIZE (tmode)
11245 			  && subreg_lowpart_p (XEXP (op0, 0))))
11246 		  && CONST_INT_P (XEXP (op0, 1))
11247 		  && mode_width <= HOST_BITS_PER_WIDE_INT
11248 		  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
11249 		  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
11250 		  && (c1 & ~GET_MODE_MASK (tmode)) == 0
11251 		  && c1 != mask
11252 		  && c1 != GET_MODE_MASK (tmode))
11253 		{
11254 		  op0 = simplify_gen_binary (AND, tmode,
11255 					     SUBREG_REG (XEXP (op0, 0)),
11256 					     gen_int_mode (c1, tmode));
11257 		  op0 = gen_lowpart (mode, op0);
11258 		  continue;
11259 		}
11260 	    }
11261 
11262 	  /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
11263 	  if (const_op == 0 && equality_comparison_p
11264 	      && XEXP (op0, 1) == const1_rtx
11265 	      && GET_CODE (XEXP (op0, 0)) == NOT)
11266 	    {
11267 	      op0 = simplify_and_const_int
11268 		(NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
11269 	      code = (code == NE ? EQ : NE);
11270 	      continue;
11271 	    }
11272 
11273 	  /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
11274 	     (eq (and (lshiftrt X) 1) 0).
11275 	     Also handle the case where (not X) is expressed using xor.  */
11276 	  if (const_op == 0 && equality_comparison_p
11277 	      && XEXP (op0, 1) == const1_rtx
11278 	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
11279 	    {
11280 	      rtx shift_op = XEXP (XEXP (op0, 0), 0);
11281 	      rtx shift_count = XEXP (XEXP (op0, 0), 1);
11282 
11283 	      if (GET_CODE (shift_op) == NOT
11284 		  || (GET_CODE (shift_op) == XOR
11285 		      && CONST_INT_P (XEXP (shift_op, 1))
11286 		      && CONST_INT_P (shift_count)
11287 		      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
11288 		      && (INTVAL (XEXP (shift_op, 1))
11289 			  == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
11290 		{
11291 		  op0 = simplify_and_const_int
11292 		    (NULL_RTX, mode,
11293 		     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
11294 		     (HOST_WIDE_INT) 1);
11295 		  code = (code == NE ? EQ : NE);
11296 		  continue;
11297 		}
11298 	    }
11299 	  break;
11300 
11301 	case ASHIFT:
11302 	  /* If we have (compare (ashift FOO N) (const_int C)) and
11303 	     the high order N bits of FOO (N+1 if an inequality comparison)
11304 	     are known to be zero, we can do this by comparing FOO with C
11305 	     shifted right N bits so long as the low-order N bits of C are
11306 	     zero.  */
11307 	  if (CONST_INT_P (XEXP (op0, 1))
11308 	      && INTVAL (XEXP (op0, 1)) >= 0
11309 	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11310 		  < HOST_BITS_PER_WIDE_INT)
11311 	      && ((const_op
11312 		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
11313 	      && mode_width <= HOST_BITS_PER_WIDE_INT
11314 	      && (nonzero_bits (XEXP (op0, 0), mode)
11315 		  & ~(mask >> (INTVAL (XEXP (op0, 1))
11316 			       + ! equality_comparison_p))) == 0)
11317 	    {
11318 	      /* We must perform a logical shift, not an arithmetic one,
11319 		 as we want the top N bits of C to be zero.  */
11320 	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11321 
11322 	      temp >>= INTVAL (XEXP (op0, 1));
11323 	      op1 = gen_int_mode (temp, mode);
11324 	      op0 = XEXP (op0, 0);
11325 	      continue;
11326 	    }
11327 
11328 	  /* If we are doing a sign bit comparison, it means we are testing
11329 	     a particular bit.  Convert it to the appropriate AND.  */
11330 	  if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
11331 	      && mode_width <= HOST_BITS_PER_WIDE_INT)
11332 	    {
11333 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11334 					    ((HOST_WIDE_INT) 1
11335 					     << (mode_width - 1
11336 						 - INTVAL (XEXP (op0, 1)))));
11337 	      code = (code == LT ? NE : EQ);
11338 	      continue;
11339 	    }
11340 
11341 	  /* If this is an equality comparison with zero and we are shifting
11342 	     the low bit to the sign bit, we can convert this to an AND of the
11343 	     low-order bit.  */
11344 	  if (const_op == 0 && equality_comparison_p
11345 	      && CONST_INT_P (XEXP (op0, 1))
11346 	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11347 		 == mode_width - 1)
11348 	    {
11349 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11350 					    (HOST_WIDE_INT) 1);
11351 	      continue;
11352 	    }
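
	  /* For example, (eq:SI (ashift:SI X (const_int 31)) (const_int 0))
	     becomes (eq (and:SI X (const_int 1)) (const_int 0)), since only
	     the low bit of X survives the shift.  */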
11353 	  break;
11354 
11355 	case ASHIFTRT:
11356 	  /* If this is an equality comparison with zero, we can do this
11357 	     as a logical shift, which might be much simpler.  */
11358 	  if (equality_comparison_p && const_op == 0
11359 	      && CONST_INT_P (XEXP (op0, 1)))
11360 	    {
11361 	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11362 					  XEXP (op0, 0),
11363 					  INTVAL (XEXP (op0, 1)));
11364 	      continue;
11365 	    }
11366 
11367 	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11368 	     do the comparison in a narrower mode.  */
11369 	  if (! unsigned_comparison_p
11370 	      && CONST_INT_P (XEXP (op0, 1))
11371 	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
11372 	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11373 	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11374 					 MODE_INT, 1)) != BLKmode
11375 	      && (((unsigned HOST_WIDE_INT) const_op
11376 		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11377 		  <= GET_MODE_MASK (tmode)))
11378 	    {
11379 	      op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
11380 	      continue;
11381 	    }
11382 
11383 	  /* Likewise if OP0 is a PLUS of a sign extension with a
11384 	     constant, which is usually represented with the PLUS
11385 	     between the shifts.  */
11386 	  if (! unsigned_comparison_p
11387 	      && CONST_INT_P (XEXP (op0, 1))
11388 	      && GET_CODE (XEXP (op0, 0)) == PLUS
11389 	      && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11390 	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11391 	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11392 	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11393 					 MODE_INT, 1)) != BLKmode
11394 	      && (((unsigned HOST_WIDE_INT) const_op
11395 		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11396 		  <= GET_MODE_MASK (tmode)))
11397 	    {
11398 	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11399 	      rtx add_const = XEXP (XEXP (op0, 0), 1);
11400 	      rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
11401 						   add_const, XEXP (op0, 1));
11402 
11403 	      op0 = simplify_gen_binary (PLUS, tmode,
11404 					 gen_lowpart (tmode, inner),
11405 					 new_const);
11406 	      continue;
11407 	    }
11408 
11409 	  /* ... fall through ...  */
11410 	case LSHIFTRT:
11411 	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11412 	     the low order N bits of FOO are known to be zero, we can do this
11413 	     by comparing FOO with C shifted left N bits so long as no
11414 	     overflow occurs.  */
11415 	  if (CONST_INT_P (XEXP (op0, 1))
11416 	      && INTVAL (XEXP (op0, 1)) >= 0
11417 	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11418 	      && mode_width <= HOST_BITS_PER_WIDE_INT
11419 	      && (nonzero_bits (XEXP (op0, 0), mode)
11420 		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
11421 	      && (((unsigned HOST_WIDE_INT) const_op
11422 		   + (GET_CODE (op0) != LSHIFTRT
11423 		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11424 			 + 1)
11425 		      : 0))
11426 		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11427 	    {
11428 	      /* If the shift was logical, then we must make the condition
11429 		 unsigned.  */
11430 	      if (GET_CODE (op0) == LSHIFTRT)
11431 		code = unsigned_condition (code);
11432 
11433 	      const_op <<= INTVAL (XEXP (op0, 1));
11434 	      op1 = GEN_INT (const_op);
11435 	      op0 = XEXP (op0, 0);
11436 	      continue;
11437 	    }
11438 
11439 	  /* If we are using this shift to extract just the sign bit, we
11440 	     can replace this with an LT or GE comparison.  */
11441 	  if (const_op == 0
11442 	      && (equality_comparison_p || sign_bit_comparison_p)
11443 	      && CONST_INT_P (XEXP (op0, 1))
11444 	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11445 		 == mode_width - 1)
11446 	    {
11447 	      op0 = XEXP (op0, 0);
11448 	      code = (code == NE || code == GT ? LT : GE);
11449 	      continue;
11450 	    }
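
	  /* For example, (ne:SI (lshiftrt:SI X (const_int 31)) (const_int 0))
	     becomes (lt X (const_int 0)): the shifted value is nonzero
	     exactly when the sign bit of X is set.  */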
11451 	  break;
11452 
11453 	default:
11454 	  break;
11455 	}
11456 
11457       break;
11458     }
11459 
11460   /* Now make any compound operations involved in this comparison.  Then,
11461      check for an outermost SUBREG on OP0 that is not doing anything or is
11462      paradoxical.  The latter transformation must only be performed when
11463      it is known that the "extra" bits will be the same in op0 and op1 or
11464      that they don't matter.  There are three cases to consider:
11465 
11466      1. SUBREG_REG (op0) is a register.  In this case the bits are don't
11467      care bits and we can assume they have any convenient value.  So
11468      making the transformation is safe.
11469 
11470      2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11471      In this case the upper bits of op0 are undefined.  We should not make
11472      the simplification in that case as we do not know the contents of
11473      those bits.
11474 
11475      3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11476      UNKNOWN.  In that case we know those bits are zeros or ones.  We must
11477      also be sure that they are the same as the upper bits of op1.
11478 
11479      We can never remove a SUBREG for a non-equality comparison because
11480      the sign bit is in a different place in the underlying object.  */
11481 
11482   op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11483   op1 = make_compound_operation (op1, SET);
11484 
11485   if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11486       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11487       && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11488       && (code == NE || code == EQ))
11489     {
11490       if (GET_MODE_SIZE (GET_MODE (op0))
11491 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11492 	{
11493 	  /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
11494 	     implemented.  */
11495 	  if (REG_P (SUBREG_REG (op0)))
11496 	    {
11497 	      op0 = SUBREG_REG (op0);
11498 	      op1 = gen_lowpart (GET_MODE (op0), op1);
11499 	    }
11500 	}
11501       else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11502 		<= HOST_BITS_PER_WIDE_INT)
11503 	       && (nonzero_bits (SUBREG_REG (op0),
11504 				 GET_MODE (SUBREG_REG (op0)))
11505 		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11506 	{
11507 	  tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
11508 
11509 	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11510 	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11511 	    op0 = SUBREG_REG (op0), op1 = tem;
11512 	}
11513     }
11514 
11515   /* We now do the opposite procedure: Some machines don't have compare
11516      insns in all modes.  If OP0's mode is an integer mode smaller than a
11517      word and we can't do a compare in that mode, see if there is a larger
11518      mode for which we can do the compare.  There are a number of cases in
11519      which we can use the wider mode.  */
11520 
11521   mode = GET_MODE (op0);
11522   if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11523       && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11524       && ! have_insn_for (COMPARE, mode))
11525     for (tmode = GET_MODE_WIDER_MODE (mode);
11526 	 (tmode != VOIDmode
11527 	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11528 	 tmode = GET_MODE_WIDER_MODE (tmode))
11529       if (have_insn_for (COMPARE, tmode))
11530 	{
11531 	  int zero_extended;
11532 
11533 	  /* If this is a test for negative, we can make an explicit
11534 	     test of the sign bit.  Test this first so we can use
11535 	     a paradoxical subreg to extend OP0.  */
11536 
11537 	  if (op1 == const0_rtx && (code == LT || code == GE)
11538 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11539 	    {
11540 	      op0 = simplify_gen_binary (AND, tmode,
11541 					 gen_lowpart (tmode, op0),
11542 					 GEN_INT ((HOST_WIDE_INT) 1
11543 						  << (GET_MODE_BITSIZE (mode)
11544 						      - 1)));
11545 	      code = (code == LT) ? NE : EQ;
11546 	      break;
11547 	    }
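
	  /* For example, if QImode has no compare pattern,
	     (lt:QI X (const_int 0)) can be rewritten as
	     (ne (and:SI (subreg:SI X 0) (const_int 128)) (const_int 0)),
	     since only the QImode sign bit (bit 7) matters.  */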
11548 
11549 	  /* If the only nonzero bits in OP0 and OP1 are those in the
11550 	     narrower mode and this is an equality or unsigned comparison,
11551 	     we can use the wider mode.  Similarly for sign-extended
11552 	     values, in which case it is true for all comparisons.  */
11553 	  zero_extended = ((code == EQ || code == NE
11554 			    || code == GEU || code == GTU
11555 			    || code == LEU || code == LTU)
11556 			   && (nonzero_bits (op0, tmode)
11557 			       & ~GET_MODE_MASK (mode)) == 0
11558 			   && ((CONST_INT_P (op1)
11559 				|| (nonzero_bits (op1, tmode)
11560 				    & ~GET_MODE_MASK (mode)) == 0)));
11561 
11562 	  if (zero_extended
11563 	      || ((num_sign_bit_copies (op0, tmode)
11564 		   > (unsigned int) (GET_MODE_BITSIZE (tmode)
11565 				     - GET_MODE_BITSIZE (mode)))
11566 		  && (num_sign_bit_copies (op1, tmode)
11567 		      > (unsigned int) (GET_MODE_BITSIZE (tmode)
11568 					- GET_MODE_BITSIZE (mode)))))
11569 	    {
11570 	      /* If OP0 is an AND and we don't have an AND in MODE either,
11571 		 make a new AND in the proper mode.  */
11572 	      if (GET_CODE (op0) == AND
11573 		  && !have_insn_for (AND, mode))
11574 		op0 = simplify_gen_binary (AND, tmode,
11575 					   gen_lowpart (tmode,
11576 							XEXP (op0, 0)),
11577 					   gen_lowpart (tmode,
11578 							XEXP (op0, 1)));
11579 	      else
11580 		{
11581 		  if (zero_extended)
11582 		    {
11583 		      op0 = simplify_gen_unary (ZERO_EXTEND, tmode, op0, mode);
11584 		      op1 = simplify_gen_unary (ZERO_EXTEND, tmode, op1, mode);
11585 		    }
11586 		  else
11587 		    {
11588 		      op0 = simplify_gen_unary (SIGN_EXTEND, tmode, op0, mode);
11589 		      op1 = simplify_gen_unary (SIGN_EXTEND, tmode, op1, mode);
11590 		    }
11591 		  break;
11592 		}
11593 	    }
11594 	}
11595 
11596 #ifdef CANONICALIZE_COMPARISON
11597   /* If this machine only supports a subset of valid comparisons, see if we
11598      can convert an unsupported one into a supported one.  */
11599   CANONICALIZE_COMPARISON (code, op0, op1);
11600 #endif
11601 
11602   *pop0 = op0;
11603   *pop1 = op1;
11604 
11605   return code;
11606 }
11607 
11608 /* Utility function for record_value_for_reg.  Count number of
11609    rtxs in X.  */
11610 static int
11611 count_rtxs (rtx x)
11612 {
11613   enum rtx_code code = GET_CODE (x);
11614   const char *fmt;
11615   int i, j, ret = 1;
11616 
11617   if (ARITHMETIC_P (x))
11619     {
11620       rtx x0 = XEXP (x, 0);
11621       rtx x1 = XEXP (x, 1);
11622 
11623       if (x0 == x1)
11624 	return 1 + 2 * count_rtxs (x0);
11625 
11626       if (ARITHMETIC_P (x1)
11628 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11629 	return 2 + 2 * count_rtxs (x0)
11630 	       + count_rtxs (x0 == XEXP (x1, 0)
11631 			     ? XEXP (x1, 1) : XEXP (x1, 0));
11632 
11633       if (ARITHMETIC_P (x0)
11635 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11636 	return 2 + 2 * count_rtxs (x1)
11637 	       + count_rtxs (x1 == XEXP (x0, 0)
11638 			     ? XEXP (x0, 1) : XEXP (x0, 0));
11639     }
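
  /* For example, for x = (plus:SI R R) with a shared operand R the result
     is 1 + 2 * count_rtxs (R), counting R twice without walking it
     twice.  */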
11640 
11641   fmt = GET_RTX_FORMAT (code);
11642   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11643     if (fmt[i] == 'e')
11644       ret += count_rtxs (XEXP (x, i));
11645     else if (fmt[i] == 'E')
11646       for (j = 0; j < XVECLEN (x, i); j++)
11647 	ret += count_rtxs (XVECEXP (x, i, j));
11648 
11649   return ret;
11650 }
11651 
11652 /* Utility function for the following routine.  Called when X is part of a value
11653    being stored into last_set_value.  Sets last_set_table_tick
11654    for each register mentioned.  Similar to mention_regs in cse.c  */
11655 
11656 static void
11657 update_table_tick (rtx x)
11658 {
11659   enum rtx_code code = GET_CODE (x);
11660   const char *fmt = GET_RTX_FORMAT (code);
11661   int i, j;
11662 
11663   if (code == REG)
11664     {
11665       unsigned int regno = REGNO (x);
11666       unsigned int endregno = END_REGNO (x);
11667       unsigned int r;
11668 
11669       for (r = regno; r < endregno; r++)
11670 	{
11671 	  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, r);
11672 	  rsp->last_set_table_tick = label_tick;
11673 	}
11674 
11675       return;
11676     }
11677 
11678   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11679     if (fmt[i] == 'e')
11680       {
11681 	/* Check for identical subexpressions.  If x contains
11682 	   identical subexpressions, we only have to traverse one of
11683 	   them.  */
11684 	if (i == 0 && ARITHMETIC_P (x))
11685 	  {
11686 	    /* Note that at this point x1 has already been
11687 	       processed.  */
11688 	    rtx x0 = XEXP (x, 0);
11689 	    rtx x1 = XEXP (x, 1);
11690 
11691 	    /* If x0 and x1 are identical then there is no need to
11692 	       process x0.  */
11693 	    if (x0 == x1)
11694 	      break;
11695 
11696 	    /* If x0 is identical to a subexpression of x1 then while
11697 	       processing x1, x0 has already been processed.  Thus we
11698 	       are done with x.  */
11699 	    if (ARITHMETIC_P (x1)
11700 		&& (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11701 	      break;
11702 
11703 	    /* If x1 is identical to a subexpression of x0 then we
11704 	       still have to process the rest of x0.  */
11705 	    if (ARITHMETIC_P (x0)
11706 		&& (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11707 	      {
11708 		update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11709 		break;
11710 	      }
11711 	  }
11712 
11713 	update_table_tick (XEXP (x, i));
11714       }
11715     else if (fmt[i] == 'E')
11716       for (j = 0; j < XVECLEN (x, i); j++)
11717 	update_table_tick (XVECEXP (x, i, j));
11718 }
11719 
11720 /* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11721    are saying that the register is clobbered and we no longer know its
11722    value.  If INSN is zero, don't update reg_stat[].last_set; this is
11723    only permitted with VALUE also zero and is used to invalidate the
11724    register.  */
11725 
11726 static void
11727 record_value_for_reg (rtx reg, rtx insn, rtx value)
11728 {
11729   unsigned int regno = REGNO (reg);
11730   unsigned int endregno = END_REGNO (reg);
11731   unsigned int i;
11732   reg_stat_type *rsp;
11733 
11734   /* If VALUE contains REG and we have a previous value for REG, substitute
11735      the previous value.  */
11736   if (value && insn && reg_overlap_mentioned_p (reg, value))
11737     {
11738       rtx tem;
11739 
11740       /* Set things up so get_last_value is allowed to see anything set up to
11741 	 our insn.  */
11742       subst_low_luid = DF_INSN_LUID (insn);
11743       tem = get_last_value (reg);
11744 
11745       /* If TEM is simply a binary operation with two CLOBBERs as operands,
11746 	 it isn't going to be useful and will take a lot of time to process,
11747 	 so just use the CLOBBER.  */
11748 
11749       if (tem)
11750 	{
11751 	  if (ARITHMETIC_P (tem)
11752 	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11753 	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11754 	    tem = XEXP (tem, 0);
11755 	  else if (count_occurrences (value, reg, 1) >= 2)
11756 	    {
11757 	      /* If there are two or more occurrences of REG in VALUE,
11758 		 prevent the value from growing too much.  */
11759 	      if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
11760 		tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
11761 	    }
11762 
11763 	  value = replace_rtx (copy_rtx (value), reg, tem);
11764 	}
11765     }
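
  /* For example, for "x = x + 1" VALUE mentions REG itself, so the
     previous value of REG is substituted and we record the equivalent
     of x_old + 1 rather than a self-referential expression.  */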
11766 
11767   /* For each register modified, show we don't know its value, that
11768      we don't know about its bitwise content, that its value has been
11769      updated, and that we don't know the location of the death of the
11770      register.  */
11771   for (i = regno; i < endregno; i++)
11772     {
11773       rsp = VEC_index (reg_stat_type, reg_stat, i);
11774 
11775       if (insn)
11776 	rsp->last_set = insn;
11777 
11778       rsp->last_set_value = 0;
11779       rsp->last_set_mode = VOIDmode;
11780       rsp->last_set_nonzero_bits = 0;
11781       rsp->last_set_sign_bit_copies = 0;
11782       rsp->last_death = 0;
11783       rsp->truncated_to_mode = VOIDmode;
11784     }
11785 
11786   /* Mark registers that are being referenced in this value.  */
11787   if (value)
11788     update_table_tick (value);
11789 
11790   /* Now update the status of each register being set.
11791      If someone is using this register in this block, set this register
11792      to invalid since we will get confused between the two lives in this
11793      basic block.  This makes all later uses of this register invalid.  In cse, we
11794      scan the table to invalidate all entries using this register, but this
11795      is too much work for us.  */
11796 
11797   for (i = regno; i < endregno; i++)
11798     {
11799       rsp = VEC_index (reg_stat_type, reg_stat, i);
11800       rsp->last_set_label = label_tick;
11801       if (!insn
11802 	  || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
11803 	rsp->last_set_invalid = 1;
11804       else
11805 	rsp->last_set_invalid = 0;
11806     }
11807 
11808   /* The value being assigned might refer to X (like in "x++;").  In that
11809      case, we must replace it with (clobber (const_int 0)) to prevent
11810      infinite loops.  */
11811   rsp = VEC_index (reg_stat_type, reg_stat, regno);
11812   if (value && !get_last_value_validate (&value, insn, label_tick, 0))
11813     {
11814       value = copy_rtx (value);
11815       if (!get_last_value_validate (&value, insn, label_tick, 1))
11816 	value = 0;
11817     }
11818 
11819   /* For the main register being modified, update the value, the mode, the
11820      nonzero bits, and the number of sign bit copies.  */
11821 
11822   rsp->last_set_value = value;
11823 
11824   if (value)
11825     {
11826       enum machine_mode mode = GET_MODE (reg);
11827       subst_low_luid = DF_INSN_LUID (insn);
11828       rsp->last_set_mode = mode;
11829       if (GET_MODE_CLASS (mode) == MODE_INT
11830 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11831 	mode = nonzero_bits_mode;
11832       rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
11833       rsp->last_set_sign_bit_copies
11834 	= num_sign_bit_copies (value, GET_MODE (reg));
11835     }
11836 }
11837 
11838 /* Called via note_stores from record_dead_and_set_regs to handle one
11839    SET or CLOBBER in an insn.  DATA is the instruction in which the
11840    set is occurring.  */
11841 
11842 static void
11843 record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
11844 {
11845   rtx record_dead_insn = (rtx) data;
11846 
11847   if (GET_CODE (dest) == SUBREG)
11848     dest = SUBREG_REG (dest);
11849 
11850   if (!record_dead_insn)
11851     {
11852       if (REG_P (dest))
11853 	record_value_for_reg (dest, NULL_RTX, NULL_RTX);
11854       return;
11855     }
11856 
11857   if (REG_P (dest))
11858     {
11859       /* If we are setting the whole register, we know its value.  Otherwise
11860 	 show that we don't know the value.  We can handle SUBREG in
11861 	 some cases.  */
11862       if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11863 	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11864       else if (GET_CODE (setter) == SET
11865 	       && GET_CODE (SET_DEST (setter)) == SUBREG
11866 	       && SUBREG_REG (SET_DEST (setter)) == dest
11867 	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11868 	       && subreg_lowpart_p (SET_DEST (setter)))
11869 	record_value_for_reg (dest, record_dead_insn,
11870 			      gen_lowpart (GET_MODE (dest),
11871 						       SET_SRC (setter)));
11872       else
11873 	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11874     }
11875   else if (MEM_P (dest)
11876 	   /* Ignore pushes, they clobber nothing.  */
11877 	   && ! push_operand (dest, GET_MODE (dest)))
11878     mem_last_set = DF_INSN_LUID (record_dead_insn);
11879 }
11880 
11881 /* Update the records of when each REG was most recently set or killed
11882    for the things done by INSN.  This is the last thing done in processing
11883    INSN in the combiner loop.
11884 
11885    We update reg_stat[], in particular fields last_set, last_set_value,
11886    last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
11887    last_death, and also the similar information mem_last_set (which insn
11888    most recently modified memory) and last_call_luid (which insn was the
11889    most recent subroutine call).  */
11890 
11891 static void
11892 record_dead_and_set_regs (rtx insn)
11893 {
11894   rtx link;
11895   unsigned int i;
11896 
11897   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11898     {
11899       if (REG_NOTE_KIND (link) == REG_DEAD
11900 	  && REG_P (XEXP (link, 0)))
11901 	{
11902 	  unsigned int regno = REGNO (XEXP (link, 0));
11903 	  unsigned int endregno = END_REGNO (XEXP (link, 0));
11904 
11905 	  for (i = regno; i < endregno; i++)
11906 	    {
11907 	      reg_stat_type *rsp;
11908 
11909 	      rsp = VEC_index (reg_stat_type, reg_stat, i);
11910 	      rsp->last_death = insn;
11911 	    }
11912 	}
11913       else if (REG_NOTE_KIND (link) == REG_INC)
11914 	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11915     }
11916 
11917   if (CALL_P (insn))
11918     {
11919       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11920 	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11921 	  {
11922 	    reg_stat_type *rsp;
11923 
11924 	    rsp = VEC_index (reg_stat_type, reg_stat, i);
11925 	    rsp->last_set_invalid = 1;
11926 	    rsp->last_set = insn;
11927 	    rsp->last_set_value = 0;
11928 	    rsp->last_set_mode = VOIDmode;
11929 	    rsp->last_set_nonzero_bits = 0;
11930 	    rsp->last_set_sign_bit_copies = 0;
11931 	    rsp->last_death = 0;
11932 	    rsp->truncated_to_mode = VOIDmode;
11933 	  }
11934 
11935       last_call_luid = mem_last_set = DF_INSN_LUID (insn);
11936 
11937       /* We can't combine into a call pattern.  Remember, though, that
11938 	 the return value register is set at this LUID.  We could
11939 	 still replace a register with the return value from the
11940 	 wrong subroutine call!  */
11941       note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
11942     }
11943   else
11944     note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11945 }
11946 
11947 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11948    register present in the SUBREG, so for each such SUBREG go back and
11949    adjust nonzero and sign bit information of the registers that are
11950    known to have some zero/sign bits set.
11951 
11952    This is needed because when combine blows the SUBREGs away, the
11953    information on zero/sign bits is lost and further combines can be
11954    missed because of that.  */
11955 
11956 static void
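/* Illustrative example, assuming a target that promotes QImode
   pseudos to SImode: for

     (subreg:QI (reg:SI 100) 0)

   with SUBREG_PROMOTED_VAR_P and SUBREG_PROMOTED_UNSIGNED_P set, the
   SImode value in reg 100 is known to be a zero extension, so its
   last_set_nonzero_bits may be narrowed with GET_MODE_MASK (QImode),
   as done below.  */
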
11957 record_promoted_value (rtx insn, rtx subreg)
11958 {
11959   rtx links, set;
11960   unsigned int regno = REGNO (SUBREG_REG (subreg));
11961   enum machine_mode mode = GET_MODE (subreg);
11962 
11963   if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11964     return;
11965 
11966   for (links = LOG_LINKS (insn); links;)
11967     {
11968       reg_stat_type *rsp;
11969 
11970       insn = XEXP (links, 0);
11971       set = single_set (insn);
11972 
11973       if (! set || !REG_P (SET_DEST (set))
11974 	  || REGNO (SET_DEST (set)) != regno
11975 	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11976 	{
11977 	  links = XEXP (links, 1);
11978 	  continue;
11979 	}
11980 
11981       rsp = VEC_index (reg_stat_type, reg_stat, regno);
11982       if (rsp->last_set == insn)
11983 	{
11984 	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11985 	    rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
11986 	}
11987 
11988       if (REG_P (SET_SRC (set)))
11989 	{
11990 	  regno = REGNO (SET_SRC (set));
11991 	  links = LOG_LINKS (insn);
11992 	}
11993       else
11994 	break;
11995     }
11996 }
11997 
11998 /* Check if X, a register, is known to contain a value already
11999    truncated to MODE.  In this case we can use a subreg to refer to
12000    the truncated value even though in the generic case we would need
12001    an explicit truncation.  */
12002 
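/* Hypothetical illustration: on a target where truncation from DImode
   to SImode is not a no-op, a DImode pseudo recorded in this EBB with
   truncated_to_mode == SImode allows

     (truncate:SI (reg:DI 100))

   to be expressed as (subreg:SI (reg:DI 100) 0) instead.  */
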
12003 static bool
12004 reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
12005 {
12006   reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
12007   enum machine_mode truncated = rsp->truncated_to_mode;
12008 
12009   if (truncated == 0
12010       || rsp->truncation_label < label_tick_ebb_start)
12011     return false;
12012   if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
12013     return true;
12014   if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
12015 			     GET_MODE_BITSIZE (truncated)))
12016     return true;
12017   return false;
12018 }
12019 
12020 /* Callback for for_each_rtx.  If *P is a hard reg or a subreg record the mode
12021    that the register is accessed in.  For non-TRULY_NOOP_TRUNCATION targets we
12022    might be able to turn a truncate into a subreg using this information.
12023    Return -1 if traversing *P is complete or 0 otherwise.  */
12024 
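/* Illustrative example (the target behavior is an assumption): when
   truncation from DImode to SImode is not a no-op, encountering

     (subreg:SI (reg:DI 200) 0)

   records truncated_to_mode == SImode for reg 200, which
   reg_truncated_to_mode above can later use to turn an explicit
   TRUNCATE of reg 200 into a plain SUBREG.  */
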
12025 static int
12026 record_truncated_value (rtx *p, void *data ATTRIBUTE_UNUSED)
12027 {
12028   rtx x = *p;
12029   enum machine_mode truncated_mode;
12030   reg_stat_type *rsp;
12031 
12032   if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
12033     {
12034       enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
12035       truncated_mode = GET_MODE (x);
12036 
12037       if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
12038 	return -1;
12039 
12040       if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode),
12041 				 GET_MODE_BITSIZE (original_mode)))
12042 	return -1;
12043 
12044       x = SUBREG_REG (x);
12045     }
12046   /* ??? For hard-regs we now record everything.  We might be able to
12047      optimize this using last_set_mode.  */
12048   else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
12049     truncated_mode = GET_MODE (x);
12050   else
12051     return 0;
12052 
12053   rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
12054   if (rsp->truncated_to_mode == 0
12055       || rsp->truncation_label < label_tick_ebb_start
12056       || (GET_MODE_SIZE (truncated_mode)
12057 	  < GET_MODE_SIZE (rsp->truncated_to_mode)))
12058     {
12059       rsp->truncated_to_mode = truncated_mode;
12060       rsp->truncation_label = label_tick;
12061     }
12062 
12063   return -1;
12064 }
12065 
12066 /* Callback for note_uses.  Find hardregs and subregs of pseudos and
12067 	   the modes they are used in.  This can help turn TRUNCATEs into
12068    SUBREGs.  */
12069 
12070 static void
12071 record_truncated_values (rtx *x, void *data ATTRIBUTE_UNUSED)
12072 {
12073   for_each_rtx (x, record_truncated_value, NULL);
12074 }
12075 
12076 /* Scan X for promoted SUBREGs.  For each one found,
12077    note what it implies to the registers used in it.  */
12078 
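/* For illustration: GET_RTX_FORMAT (PLUS) is "ee", so both operands
   of a PLUS are scanned recursively, while GET_RTX_FORMAT (PARALLEL)
   is "E" and each element of its vector is scanned in turn.  */
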
12079 static void
12080 check_promoted_subreg (rtx insn, rtx x)
12081 {
12082   if (GET_CODE (x) == SUBREG
12083       && SUBREG_PROMOTED_VAR_P (x)
12084       && REG_P (SUBREG_REG (x)))
12085     record_promoted_value (insn, x);
12086   else
12087     {
12088       const char *format = GET_RTX_FORMAT (GET_CODE (x));
12089       int i, j;
12090 
12091       for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
12092 	switch (format[i])
12093 	  {
12094 	  case 'e':
12095 	    check_promoted_subreg (insn, XEXP (x, i));
12096 	    break;
12097 	  case 'V':
12098 	  case 'E':
12099 	    if (XVEC (x, i) != 0)
12100 	      for (j = 0; j < XVECLEN (x, i); j++)
12101 		check_promoted_subreg (insn, XVECEXP (x, i, j));
12102 	    break;
12103 	  }
12104     }
12105 }
12106 
12107 /* Verify that all the registers and memory references mentioned in *LOC are
12108    still valid.  *LOC was part of a value set in INSN when label_tick was
12109    equal to TICK.  Return 0 if some are not.  If REPLACE is nonzero, replace
12110    the invalid references with (clobber (const_int 0)) and return 1.  This
12111    replacement is useful because we often can get useful information about
12112    the form of a value (e.g., if it was produced by a shift that always
12113    produces -1 or 0) even though we don't know exactly what registers it
12114    was produced from.  */
12115 
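/* Illustrative example, not from the original sources: if the
   recorded value was

     (ashiftrt:SI (reg:SI 100) (const_int 31))

   and reg 100 has since become invalid, REPLACE turns the value into

     (ashiftrt:SI (clobber:SI (const_int 0)) (const_int 31))

   which still tells later queries that the result must be -1 or 0,
   even though the exact operand is gone.  */
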
12116 static int
12117 get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
12118 {
12119   rtx x = *loc;
12120   const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
12121   int len = GET_RTX_LENGTH (GET_CODE (x));
12122   int i, j;
12123 
12124   if (REG_P (x))
12125     {
12126       unsigned int regno = REGNO (x);
12127       unsigned int endregno = END_REGNO (x);
12128       unsigned int j;
12129 
12130       for (j = regno; j < endregno; j++)
12131 	{
12132 	  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, j);
12133 	  if (rsp->last_set_invalid
12134 	      /* If this is a pseudo-register that was only set once and not
12135 		 live at the beginning of the function, it is always valid.  */
12136 	      || (! (regno >= FIRST_PSEUDO_REGISTER
12137 		     && REG_N_SETS (regno) == 1
12138 		     && (!REGNO_REG_SET_P
12139 			 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
12140 		  && rsp->last_set_label > tick))
12141 	  {
12142 	    if (replace)
12143 	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
12144 	    return replace;
12145 	  }
12146 	}
12147 
12148       return 1;
12149     }
12150   /* If this is a memory reference, make sure that there were no stores after
12151      it that might have clobbered the value.  We don't have alias info, so we
12152      assume any store invalidates it.  Moreover, we only have local UIDs, so
12153      we also assume that there were stores in the intervening basic blocks.  */
12154   else if (MEM_P (x) && !MEM_READONLY_P (x)
12155 	   && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
12156     {
12157       if (replace)
12158 	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
12159       return replace;
12160     }
12161 
12162   for (i = 0; i < len; i++)
12163     {
12164       if (fmt[i] == 'e')
12165 	{
12166 	  /* Check for identical subexpressions.  If x contains
12167 	     identical subexpressions we only have to traverse one of
12168 	     them.  */
12169 	  if (i == 1 && ARITHMETIC_P (x))
12170 	    {
12171 	      /* Note that at this point x0 has already been checked
12172 		 and found valid.  */
12173 	      rtx x0 = XEXP (x, 0);
12174 	      rtx x1 = XEXP (x, 1);
12175 
12176 	      /* If x0 and x1 are identical then x is also valid.  */
12177 	      if (x0 == x1)
12178 		return 1;
12179 
12180 	      /* If x1 is identical to a subexpression of x0 then
12181 		 while checking x0, x1 has already been checked.  Thus
12182 		 it is valid and so is x.  */
12183 	      if (ARITHMETIC_P (x0)
12184 		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
12185 		return 1;
12186 
12187 	      /* If x0 is identical to a subexpression of x1 then x is
12188 		 valid iff the rest of x1 is valid.  */
12189 	      if (ARITHMETIC_P (x1)
12190 		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
12191 		return
12192 		  get_last_value_validate (&XEXP (x1,
12193 						  x0 == XEXP (x1, 0) ? 1 : 0),
12194 					   insn, tick, replace);
12195 	    }
12196 
12197 	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
12198 				       replace) == 0)
12199 	    return 0;
12200 	}
12201       else if (fmt[i] == 'E')
12202 	for (j = 0; j < XVECLEN (x, i); j++)
12203 	  if (get_last_value_validate (&XVECEXP (x, i, j),
12204 				       insn, tick, replace) == 0)
12205 	    return 0;
12206     }
12207 
12208   /* If we haven't found a reason for it to be invalid, it is valid.  */
12209   return 1;
12210 }
12211 
12212 /* Get the last value assigned to X, if known.  Some registers
12213    in the value may be replaced with (clobber (const_int 0)) if their value
12214    is no longer known reliably.  */
12215 
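/* Illustrative example (hypothetical register numbers): if reg 100
   was last set in this basic block by

     (set (reg:SI 100) (const_int 5))

   then get_last_value of (subreg:QI (reg:SI 100) 0) goes through the
   SUBREG case below and returns the QImode lowpart (const_int 5).  */
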
12216 static rtx
12217 get_last_value (const_rtx x)
12218 {
12219   unsigned int regno;
12220   rtx value;
12221   reg_stat_type *rsp;
12222 
12223   /* If this is a non-paradoxical SUBREG, get the value of its operand and
12224      then convert it to the desired mode.  If this is a paradoxical SUBREG,
12225      we cannot predict what values the "extra" bits might have.  */
12226   if (GET_CODE (x) == SUBREG
12227       && subreg_lowpart_p (x)
12228       && (GET_MODE_SIZE (GET_MODE (x))
12229 	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
12230       && (value = get_last_value (SUBREG_REG (x))) != 0)
12231     return gen_lowpart (GET_MODE (x), value);
12232 
12233   if (!REG_P (x))
12234     return 0;
12235 
12236   regno = REGNO (x);
12237   rsp = VEC_index (reg_stat_type, reg_stat, regno);
12238   value = rsp->last_set_value;
12239 
12240   /* If we don't have a value, or if it isn't for this basic block and
12241      it's either a hard register, set more than once, or it's live
12242      at the beginning of the function, return 0.
12243 
12244      Because if it's not live at the beginning of the function then the reg
12245      is always set before being used (is never used without being set).
12246      And, if it's set only once, and it's always set before use, then all
12247      uses must have the same last value, even if it's not from this basic
12248      block.  */
12249 
12250   if (value == 0
12251       || (rsp->last_set_label < label_tick_ebb_start
12252 	  && (regno < FIRST_PSEUDO_REGISTER
12253 	      || REG_N_SETS (regno) != 1
12254 	      || REGNO_REG_SET_P
12255 		 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
12256     return 0;
12257 
12258   /* If the value was set in a later insn than the ones we are processing,
12259      we can't use it even if the register was only set once.  */
12260   if (rsp->last_set_label == label_tick
12261       && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
12262     return 0;
12263 
12264   /* If the value has all its registers valid, return it.  */
12265   if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 0))
12266     return value;
12267 
12268   /* Otherwise, make a copy and replace any invalid register with
12269      (clobber (const_int 0)).  If that fails for some reason, return 0.  */
12270 
12271   value = copy_rtx (value);
12272   if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 1))
12273     return value;
12274 
12275   return 0;
12276 }
12277 
12278 /* Return nonzero if expression X refers to a REG or to memory
12279    that is set in an instruction more recent than FROM_LUID.  */
12280 
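/* Illustrative insn stream (hypothetical UIDs):

     insn 10: (set (reg:SI 100) (reg:SI 101))
     insn 11: (set (reg:SI 101) (const_int 0))
     insn 12: ... uses (reg:SI 100) ...

   Substituting insn 10's source into insn 12 would move the use of
   reg 101 past insn 11's set of it, so with FROM_LUID at insn 10 this
   function returns 1 for (reg:SI 101).  */
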
12281 static int
12282 use_crosses_set_p (const_rtx x, int from_luid)
12283 {
12284   const char *fmt;
12285   int i;
12286   enum rtx_code code = GET_CODE (x);
12287 
12288   if (code == REG)
12289     {
12290       unsigned int regno = REGNO (x);
12291       unsigned endreg = END_REGNO (x);
12292 
12293 #ifdef PUSH_ROUNDING
12294       /* Don't allow uses of the stack pointer to be moved,
12295 	 because we don't know whether the move crosses a push insn.  */
12296       if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
12297 	return 1;
12298 #endif
12299       for (; regno < endreg; regno++)
12300 	{
12301 	  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
12302 	  if (rsp->last_set
12303 	      && rsp->last_set_label == label_tick
12304 	      && DF_INSN_LUID (rsp->last_set) > from_luid)
12305 	    return 1;
12306 	}
12307       return 0;
12308     }
12309 
12310   if (code == MEM && mem_last_set > from_luid)
12311     return 1;
12312 
12313   fmt = GET_RTX_FORMAT (code);
12314 
12315   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12316     {
12317       if (fmt[i] == 'E')
12318 	{
12319 	  int j;
12320 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12321 	    if (use_crosses_set_p (XVECEXP (x, i, j), from_luid))
12322 	      return 1;
12323 	}
12324       else if (fmt[i] == 'e'
12325 	       && use_crosses_set_p (XEXP (x, i), from_luid))
12326 	return 1;
12327     }
12328   return 0;
12329 }
12330 
12331 /* Define three variables used for communication between the following
12332    routines.  */
12333 
12334 static unsigned int reg_dead_regno, reg_dead_endregno;
12335 static int reg_dead_flag;
12336 
12337 /* Function called via note_stores from reg_dead_at_p.
12338 
12339    If DEST is within [reg_dead_regno, reg_dead_endregno), set
12340    reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
12341 
12342 static void
12343 reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
12344 {
12345   unsigned int regno, endregno;
12346 
12347   if (!REG_P (dest))
12348     return;
12349 
12350   regno = REGNO (dest);
12351   endregno = END_REGNO (dest);
12352   if (reg_dead_endregno > regno && reg_dead_regno < endregno)
12353     reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
12354 }
12355 
12356 /* Return nonzero if REG is known to be dead at INSN.
12357 
12358    We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
12359    referencing REG, it is dead.  If we hit a SET referencing REG, it is
12360    live.  Otherwise, see if it is live or dead at the start of the basic
12361    block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
12362    must be assumed to be always live.  */
12363 
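/* Illustrative example: scanning backwards from INSN, a
   (clobber (reg:SI 100)) proves reg 100 dead, a (set (reg:SI 100) ...)
   proves it live, and a REG_DEAD note for it also proves it dead; only
   if the scan reaches the head of the block do we fall back on the
   block's live-in set.  */
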
12364 static int
12365 reg_dead_at_p (rtx reg, rtx insn)
12366 {
12367   basic_block block;
12368   unsigned int i;
12369 
12370   /* Set variables for reg_dead_at_p_1.  */
12371   reg_dead_regno = REGNO (reg);
12372   reg_dead_endregno = END_REGNO (reg);
12373 
12374   reg_dead_flag = 0;
12375 
12376   /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
12377      we allow the machine description to decide whether use-and-clobber
12378      patterns are OK.  */
12379   if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
12380     {
12381       for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12382 	if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
12383 	  return 0;
12384     }
12385 
12386   /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
12387      beginning of basic block.  */
12388   block = BLOCK_FOR_INSN (insn);
12389   for (;;)
12390     {
12391       if (INSN_P (insn))
12392         {
12393 	  note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
12394 	  if (reg_dead_flag)
12395 	    return reg_dead_flag == 1 ? 1 : 0;
12396 
12397 	  if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
12398 	    return 1;
12399         }
12400 
12401       if (insn == BB_HEAD (block))
12402 	break;
12403 
12404       insn = PREV_INSN (insn);
12405     }
12406 
12407   /* Look at live-in sets for the basic block that we were in.  */
12408   for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12409     if (REGNO_REG_SET_P (df_get_live_in (block), i))
12410       return 0;
12411 
12412   return 1;
12413 }
12414 
12415 /* Note hard registers in X that are used.  */
12416 
12417 static void
12418 mark_used_regs_combine (rtx x)
12419 {
12420   RTX_CODE code = GET_CODE (x);
12421   unsigned int regno;
12422   int i;
12423 
12424   switch (code)
12425     {
12426     case LABEL_REF:
12427     case SYMBOL_REF:
12428     case CONST_INT:
12429     case CONST:
12430     case CONST_DOUBLE:
12431     case CONST_VECTOR:
12432     case PC:
12433     case ADDR_VEC:
12434     case ADDR_DIFF_VEC:
12435     case ASM_INPUT:
12436 #ifdef HAVE_cc0
12437     /* CC0 must die in the insn after it is set, so we don't need to take
12438        special note of it here.  */
12439     case CC0:
12440 #endif
12441       return;
12442 
12443     case CLOBBER:
12444       /* If we are clobbering a MEM, mark any hard registers inside the
12445 	 address as used.  */
12446       if (MEM_P (XEXP (x, 0)))
12447 	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
12448       return;
12449 
12450     case REG:
12451       regno = REGNO (x);
12452       /* A hard reg in a wide mode may really be multiple registers.
12453 	 If so, mark all of them just like the first.  */
12454       if (regno < FIRST_PSEUDO_REGISTER)
12455 	{
12456 	  /* None of this applies to the stack, frame or arg pointers.  */
12457 	  if (regno == STACK_POINTER_REGNUM
12458 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
12459 	      || regno == HARD_FRAME_POINTER_REGNUM
12460 #endif
12461 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
12462 	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
12463 #endif
12464 	      || regno == FRAME_POINTER_REGNUM)
12465 	    return;
12466 
12467 	  add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
12468 	}
12469       return;
12470 
12471     case SET:
12472       {
12473 	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
12474 	   the address.  */
12475 	rtx testreg = SET_DEST (x);
12476 
12477 	while (GET_CODE (testreg) == SUBREG
12478 	       || GET_CODE (testreg) == ZERO_EXTRACT
12479 	       || GET_CODE (testreg) == STRICT_LOW_PART)
12480 	  testreg = XEXP (testreg, 0);
12481 
12482 	if (MEM_P (testreg))
12483 	  mark_used_regs_combine (XEXP (testreg, 0));
12484 
12485 	mark_used_regs_combine (SET_SRC (x));
12486       }
12487       return;
12488 
12489     default:
12490       break;
12491     }
12492 
12493   /* Recursively scan the operands of this expression.  */
12494 
12495   {
12496     const char *fmt = GET_RTX_FORMAT (code);
12497 
12498     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12499       {
12500 	if (fmt[i] == 'e')
12501 	  mark_used_regs_combine (XEXP (x, i));
12502 	else if (fmt[i] == 'E')
12503 	  {
12504 	    int j;
12505 
12506 	    for (j = 0; j < XVECLEN (x, i); j++)
12507 	      mark_used_regs_combine (XVECEXP (x, i, j));
12508 	  }
12509       }
12510   }
12511 }
12512 
12513 /* Remove register number REGNO from the dead registers list of INSN.
12514 
12515    Return the note used to record the death, if there was one.  */
12516 
12517 rtx
12518 remove_death (unsigned int regno, rtx insn)
12519 {
12520   rtx note = find_regno_note (insn, REG_DEAD, regno);
12521 
12522   if (note)
12523     remove_note (insn, note);
12524 
12525   return note;
12526 }
12527 
12528 /* For each register (hardware or pseudo) used within expression X, if its
12529    death is in an instruction with luid between FROM_LUID (inclusive) and
12530    TO_INSN (exclusive), put a REG_DEAD note for that register in the
12531    list headed by PNOTES.
12532 
12533    That said, don't move registers killed by maybe_kill_insn.
12534 
12535    This is done when X is being merged by combination into TO_INSN.  These
12536    notes will then be distributed as needed.  */
12537 
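/* Illustrative example (hypothetical insns): if reg 100 last died in
   an insn I2 whose single set is being merged into I3, the REG_DEAD
   note is removed from its old insn and threaded onto *PNOTES here;
   distribute_notes then decides where (typically I3) the death should
   now be recorded.  */
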
12538 static void
12539 move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
12540 	     rtx *pnotes)
12541 {
12542   const char *fmt;
12543   int len, i;
12544   enum rtx_code code = GET_CODE (x);
12545 
12546   if (code == REG)
12547     {
12548       unsigned int regno = REGNO (x);
12549       rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno)->last_death;
12550 
12551       /* Don't move the register if it gets killed in between from and to.  */
12552       if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
12553 	  && ! reg_referenced_p (x, maybe_kill_insn))
12554 	return;
12555 
12556       if (where_dead
12557 	  && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
12558 	  && DF_INSN_LUID (where_dead) >= from_luid
12559 	  && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
12560 	{
12561 	  rtx note = remove_death (regno, where_dead);
12562 
12563 	  /* It is possible for the call above to return 0.  This can occur
12564 	     when last_death points to I2 or I1 that we combined with.
12565 	     In that case make a new note.
12566 
12567 	     We must also check for the case where X is a hard register
12568 	     and NOTE is a death note for a range of hard registers
12569 	     including X.  In that case, we must put REG_DEAD notes for
12570 	     the remaining registers in place of NOTE.  */
12571 
12572 	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
12573 	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12574 		  > GET_MODE_SIZE (GET_MODE (x))))
12575 	    {
12576 	      unsigned int deadregno = REGNO (XEXP (note, 0));
12577 	      unsigned int deadend = END_HARD_REGNO (XEXP (note, 0));
12578 	      unsigned int ourend = END_HARD_REGNO (x);
12579 	      unsigned int i;
12580 
12581 	      for (i = deadregno; i < deadend; i++)
12582 		if (i < regno || i >= ourend)
12583 		  add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
12584 	    }
12585 
12586 	  /* If we didn't find any note, or if we found a REG_DEAD note that
12587 	     covers only part of the given reg, and we have a multi-reg hard
12588 	     register, then to be safe we must check for REG_DEAD notes
12589 	     for each register other than the first.  They could have
12590 	     their own REG_DEAD notes lying around.  */
12591 	  else if ((note == 0
12592 		    || (note != 0
12593 			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12594 			    < GET_MODE_SIZE (GET_MODE (x)))))
12595 		   && regno < FIRST_PSEUDO_REGISTER
12596 		   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
12597 	    {
12598 	      unsigned int ourend = END_HARD_REGNO (x);
12599 	      unsigned int i, offset;
12600 	      rtx oldnotes = 0;
12601 
12602 	      if (note)
12603 		offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
12604 	      else
12605 		offset = 1;
12606 
12607 	      for (i = regno + offset; i < ourend; i++)
12608 		move_deaths (regno_reg_rtx[i],
12609 			     maybe_kill_insn, from_luid, to_insn, &oldnotes);
12610 	    }
12611 
12612 	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
12613 	    {
12614 	      XEXP (note, 1) = *pnotes;
12615 	      *pnotes = note;
12616 	    }
12617 	  else
12618 	    *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
12619 	}
12620 
12621       return;
12622     }
12623 
12624   else if (GET_CODE (x) == SET)
12625     {
12626       rtx dest = SET_DEST (x);
12627 
12628       move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);
12629 
12630       /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
12631 	 that accesses one word of a multi-word item, some
12632 	 piece of every register in the expression is used by
12633 	 this insn, so remove any old death.  */
12634       /* ??? So why do we test for equality of the sizes?  */
12635 
12636       if (GET_CODE (dest) == ZERO_EXTRACT
12637 	  || GET_CODE (dest) == STRICT_LOW_PART
12638 	  || (GET_CODE (dest) == SUBREG
12639 	      && (((GET_MODE_SIZE (GET_MODE (dest))
12640 		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12641 		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12642 		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
12643 	{
12644 	  move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
12645 	  return;
12646 	}
12647 
12648       /* If this is some other SUBREG, we know it replaces the entire
12649 	 value, so use that as the destination.  */
12650       if (GET_CODE (dest) == SUBREG)
12651 	dest = SUBREG_REG (dest);
12652 
12653       /* If this is a MEM, adjust deaths of anything used in the address.
12654 	 For a REG (the only other possibility), the entire value is
12655 	 being replaced so the old value is not used in this insn.  */
12656 
12657       if (MEM_P (dest))
12658 	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
12659 		     to_insn, pnotes);
12660       return;
12661     }
12662 
12663   else if (GET_CODE (x) == CLOBBER)
12664     return;
12665 
12666   len = GET_RTX_LENGTH (code);
12667   fmt = GET_RTX_FORMAT (code);
12668 
12669   for (i = 0; i < len; i++)
12670     {
12671       if (fmt[i] == 'E')
12672 	{
12673 	  int j;
12674 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12675 	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
12676 			 to_insn, pnotes);
12677 	}
12678       else if (fmt[i] == 'e')
12679 	move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
12680     }
12681 }
12682 
12683 /* Return 1 if X is the target of a bit-field assignment in BODY, the
12684    pattern of an insn.  X must be a REG.  */
12685 
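/* Illustrative example: for the body

     (set (zero_extract:SI (reg:SI 100) (const_int 3) (const_int 2))
	  (const_int 1))

   this returns 1 when X is (reg:SI 100), since only bits 2..4 of the
   register are written and the remaining bits stay live.  */
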
12686 static int
12687 reg_bitfield_target_p (rtx x, rtx body)
12688 {
12689   int i;
12690 
12691   if (GET_CODE (body) == SET)
12692     {
12693       rtx dest = SET_DEST (body);
12694       rtx target;
12695       unsigned int regno, tregno, endregno, endtregno;
12696 
12697       if (GET_CODE (dest) == ZERO_EXTRACT)
12698 	target = XEXP (dest, 0);
12699       else if (GET_CODE (dest) == STRICT_LOW_PART)
12700 	target = SUBREG_REG (XEXP (dest, 0));
12701       else
12702 	return 0;
12703 
12704       if (GET_CODE (target) == SUBREG)
12705 	target = SUBREG_REG (target);
12706 
12707       if (!REG_P (target))
12708 	return 0;
12709 
12710       tregno = REGNO (target), regno = REGNO (x);
12711       if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12712 	return target == x;
12713 
12714       endtregno = end_hard_regno (GET_MODE (target), tregno);
12715       endregno = end_hard_regno (GET_MODE (x), regno);
12716 
12717       return endregno > tregno && regno < endtregno;
12718     }
12719 
12720   else if (GET_CODE (body) == PARALLEL)
12721     for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12722       if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12723 	return 1;
12724 
12725   return 0;
12726 }
12727 
12728 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12729    as appropriate.  I3 and I2 are the insns resulting from the combination
12730    insns including FROM (I2 may be zero).
12731 
12732    ELIM_I2 and ELIM_I1 are either zero or registers that we know will
12733    not need REG_DEAD notes because they are being substituted for.  This
12734    saves searching in the most common cases.
12735 
12736    Each note in the list is either ignored or placed on some insns, depending
12737    on the type of note.  */
12738 
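/* Illustrative example: after a 2->1 combination in which I2 is
   deleted, a REG_DEAD note for a register now used only as an input
   of I3 is moved onto I3; a REG_EH_REGION note, by contrast, must stay
   with whichever of I3 or I2 is the call or potentially trapping
   insn.  */
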
12739 static void
12740 distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
12741 		  rtx elim_i1)
12742 {
12743   rtx note, next_note;
12744   rtx tem;
12745 
12746   for (note = notes; note; note = next_note)
12747     {
12748       rtx place = 0, place2 = 0;
12749 
12750       next_note = XEXP (note, 1);
12751       switch (REG_NOTE_KIND (note))
12752 	{
12753 	case REG_BR_PROB:
12754 	case REG_BR_PRED:
12755 	  /* Doesn't matter much where we put this, as long as it's somewhere.
12756 	     It is preferable to keep these notes on branches, which is most
12757 	     likely to be i3.  */
12758 	  place = i3;
12759 	  break;
12760 
12761 	case REG_VALUE_PROFILE:
12762 	  /* Just get rid of this note, as it is unused later anyway.  */
12763 	  break;
12764 
12765 	case REG_NON_LOCAL_GOTO:
12766 	  if (JUMP_P (i3))
12767 	    place = i3;
12768 	  else
12769 	    {
12770 	      gcc_assert (i2 && JUMP_P (i2));
12771 	      place = i2;
12772 	    }
12773 	  break;
12774 
12775 	case REG_EH_REGION:
12776 	  /* These notes must remain with the call or trapping instruction.  */
12777 	  if (CALL_P (i3))
12778 	    place = i3;
12779 	  else if (i2 && CALL_P (i2))
12780 	    place = i2;
12781 	  else
12782 	    {
12783 	      gcc_assert (flag_non_call_exceptions);
12784 	      if (may_trap_p (i3))
12785 		place = i3;
12786 	      else if (i2 && may_trap_p (i2))
12787 		place = i2;
12788 	      /* ??? Otherwise assume we've combined things such that we
12789 		 can now prove that the instructions can't trap.  Drop the
12790 		 note in this case.  */
12791 	    }
12792 	  break;
12793 
12794 	case REG_NORETURN:
12795 	case REG_SETJMP:
12796 	  /* These notes must remain with the call.  It should not be
12797 	     possible for both I2 and I3 to be a call.  */
12798 	  if (CALL_P (i3))
12799 	    place = i3;
12800 	  else
12801 	    {
12802 	      gcc_assert (i2 && CALL_P (i2));
12803 	      place = i2;
12804 	    }
12805 	  break;
12806 
12807 	case REG_UNUSED:
12808 	  /* Any clobbers for i3 may still exist, and so we must process
12809 	     REG_UNUSED notes from that insn.
12810 
12811 	     Any clobbers from i2 or i1 can only exist if they were added by
12812 	     recog_for_combine.  In that case, recog_for_combine created the
12813 	     necessary REG_UNUSED notes.  Trying to keep any original
12814 	     REG_UNUSED notes from these insns can cause incorrect output
12815 	     if it is for the same register as the original i3 dest.
12816 	     In that case, we will notice that the register is set in i3,
12817 	     and then add a REG_UNUSED note for the destination of i3, which
12818 	     is wrong.  However, it is possible to have REG_UNUSED notes from
12819 	     i2 or i1 for registers which were both used and clobbered, so
12820 	     we keep notes from i2 or i1 if they will turn into REG_DEAD
12821 	     notes.  */
12822 
12823 	  /* If this register is set or clobbered in I3, put the note there
12824 	     unless there is one already.  */
12825 	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12826 	    {
12827 	      if (from_insn != i3)
12828 		break;
12829 
12830 	      if (! (REG_P (XEXP (note, 0))
12831 		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12832 		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12833 		place = i3;
12834 	    }
12835 	  /* Otherwise, if this register is used by I3, then this register
12836 	     now dies here, so we must put a REG_DEAD note here unless there
12837 	     is one already.  */
12838 	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12839 		   && ! (REG_P (XEXP (note, 0))
12840 			 ? find_regno_note (i3, REG_DEAD,
12841 					    REGNO (XEXP (note, 0)))
12842 			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12843 	    {
12844 	      PUT_REG_NOTE_KIND (note, REG_DEAD);
12845 	      place = i3;
12846 	    }
12847 	  break;
12848 
12849 	case REG_EQUAL:
12850 	case REG_EQUIV:
12851 	case REG_NOALIAS:
12852 	  /* These notes say something about results of an insn.  We can
12853 	     only support them if they used to be on I3 in which case they
12854 	     remain on I3.  Otherwise they are ignored.
12855 
12856 	     If the note refers to an expression that is not a constant, we
12857 	     must also ignore the note since we cannot tell whether the
12858 	     equivalence is still true.  It might be possible to do
12859 	     slightly better than this (we only have a problem if I2DEST
12860 	     or I1DEST is present in the expression), but it doesn't
12861 	     seem worth the trouble.  */
12862 
12863 	  if (from_insn == i3
12864 	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12865 	    place = i3;
12866 	  break;
12867 
12868 	case REG_INC:
12869 	  /* These notes say something about how a register is used.  They must
12870 	     be present on any use of the register in I2 or I3.  */
12871 	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12872 	    place = i3;
12873 
12874 	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12875 	    {
12876 	      if (place)
12877 		place2 = i2;
12878 	      else
12879 		place = i2;
12880 	    }
12881 	  break;
12882 
12883 	case REG_LABEL_TARGET:
12884 	case REG_LABEL_OPERAND:
12885 	  /* This can show up in several ways -- either directly in the
12886 	     pattern, or hidden off in the constant pool with (or without?)
12887 	     a REG_EQUAL note.  */
12888 	  /* ??? Ignore the without-reg_equal-note problem for now.  */
12889 	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12890 	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12891 		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12892 		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12893 	    place = i3;
12894 
12895 	  if (i2
12896 	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12897 		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12898 		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12899 		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12900 	    {
12901 	      if (place)
12902 		place2 = i2;
12903 	      else
12904 		place = i2;
12905 	    }
12906 
12907 	  /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
12908 	     as a JUMP_LABEL or decrement LABEL_NUSES if it's already
12909 	     there.  */
12910 	  if (place && JUMP_P (place)
12911 	      && REG_NOTE_KIND (note) == REG_LABEL_TARGET
12912 	      && (JUMP_LABEL (place) == NULL
12913 		  || JUMP_LABEL (place) == XEXP (note, 0)))
12914 	    {
12915 	      rtx label = JUMP_LABEL (place);
12916 
12917 	      if (!label)
12918 		JUMP_LABEL (place) = XEXP (note, 0);
12919 	      else if (LABEL_P (label))
12920 		LABEL_NUSES (label)--;
12921 	    }
12922 
12923 	  if (place2 && JUMP_P (place2)
12924 	      && REG_NOTE_KIND (note) == REG_LABEL_TARGET
12925 	      && (JUMP_LABEL (place2) == NULL
12926 		  || JUMP_LABEL (place2) == XEXP (note, 0)))
12927 	    {
12928 	      rtx label = JUMP_LABEL (place2);
12929 
12930 	      if (!label)
12931 		JUMP_LABEL (place2) = XEXP (note, 0);
12932 	      else if (LABEL_P (label))
12933 		LABEL_NUSES (label)--;
12934 	      place2 = 0;
12935 	    }
12936 	  break;
12937 
12938 	case REG_NONNEG:
12939 	  /* This note says something about the value of a register prior
12940 	     to the execution of an insn.  It is too much trouble to see
12941 	     if the note is still correct in all situations.  It is better
12942 	     to simply delete it.  */
12943 	  break;
12944 
12945 	case REG_DEAD:
12946 	  /* If we replaced the right hand side of FROM_INSN with a
12947 	     REG_EQUAL note, the original use of the dying register
12948 	     will not have been combined into I3 and I2.  In such cases,
12949 	     FROM_INSN is guaranteed to be the first of the combined
12950 	     instructions, so we simply need to search back before
12951 	     FROM_INSN for the previous use or set of this register,
12952 	     then alter the notes there appropriately.
12953 
12954 	     If the register is used as an input in I3, it dies there.
12955 	     Similarly for I2, if it is nonzero and adjacent to I3.
12956 
12957 	     If the register is not used as an input in either I3 or I2
12958 	     and it is not one of the registers we were supposed to eliminate,
12959 	     there are two possibilities.  We might have a non-adjacent I2
12960 	     or we might have somehow eliminated an additional register
12961 	     from a computation.  For example, we might have had A & B where
12962 	     we discover that B will always be zero.  In this case we will
12963 	     eliminate the reference to A.
12964 
12965 	     In both cases, we must search to see if we can find a previous
12966 	     use of A and put the death note there.  */
12967 
12968 	  if (from_insn
12969 	      && from_insn == i2mod
12970 	      && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
12971 	    tem = from_insn;
12972 	  else
12973 	    {
12974 	      if (from_insn
12975 		  && CALL_P (from_insn)
12976 		  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12977 		place = from_insn;
12978 	      else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12979 		place = i3;
12980 	      else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
12981 		       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12982 		place = i2;
12983 	      else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
12984 			&& !(i2mod
12985 			     && reg_overlap_mentioned_p (XEXP (note, 0),
12986 							 i2mod_old_rhs)))
12987 		       || rtx_equal_p (XEXP (note, 0), elim_i1))
12988 		break;
12989 	      tem = i3;
12990 	    }
12991 
12992 	  if (place == 0)
12993 	    {
12994 	      basic_block bb = this_basic_block;
12995 
12996 	      for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
12997 		{
12998 		  if (!NONDEBUG_INSN_P (tem))
12999 		    {
13000 		      if (tem == BB_HEAD (bb))
13001 			break;
13002 		      continue;
13003 		    }
13004 
13005 		  /* If the register is being set at TEM, see if that is all
13006 		     TEM is doing.  If so, delete TEM.  Otherwise, make this
13007 		     into a REG_UNUSED note instead.  Don't delete sets to
13008 		     global register vars.  */
13009 		  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
13010 		       || !global_regs[REGNO (XEXP (note, 0))])
13011 		      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
13012 		    {
13013 		      rtx set = single_set (tem);
13014 		      rtx inner_dest = 0;
13015 #ifdef HAVE_cc0
13016 		      rtx cc0_setter = NULL_RTX;
13017 #endif
13018 
13019 		      if (set != 0)
13020 			for (inner_dest = SET_DEST (set);
13021 			     (GET_CODE (inner_dest) == STRICT_LOW_PART
13022 			      || GET_CODE (inner_dest) == SUBREG
13023 			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
13024 			     inner_dest = XEXP (inner_dest, 0))
13025 			  ;
13026 
13027 		      /* Verify that it was the set, and not a clobber that
13028 			 modified the register.
13029 
13030 			 CC0 targets must be careful to maintain setter/user
13031 			 pairs.  If we cannot delete the setter due to side
13032 			 effects, mark the user with an UNUSED note instead
13033 			 of deleting it.  */
13034 
13035 		      if (set != 0 && ! side_effects_p (SET_SRC (set))
13036 			  && rtx_equal_p (XEXP (note, 0), inner_dest)
13037 #ifdef HAVE_cc0
13038 			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
13039 			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
13040 				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
13041 #endif
13042 			  )
13043 			{
13044 			  /* Move the notes and links of TEM elsewhere.
13045 			     This might delete other dead insns recursively.
13046 			     First set the pattern to something that won't use
13047 			     any register.  */
13048 			  rtx old_notes = REG_NOTES (tem);
13049 
13050 			  PATTERN (tem) = pc_rtx;
13051 			  REG_NOTES (tem) = NULL;
13052 
13053 			  distribute_notes (old_notes, tem, tem, NULL_RTX,
13054 					    NULL_RTX, NULL_RTX);
13055 			  distribute_links (LOG_LINKS (tem));
13056 
13057 			  SET_INSN_DELETED (tem);
13058 			  if (tem == i2)
13059 			    i2 = NULL_RTX;
13060 
13061 #ifdef HAVE_cc0
13062 			  /* Delete the setter too.  */
13063 			  if (cc0_setter)
13064 			    {
13065 			      PATTERN (cc0_setter) = pc_rtx;
13066 			      old_notes = REG_NOTES (cc0_setter);
13067 			      REG_NOTES (cc0_setter) = NULL;
13068 
13069 			      distribute_notes (old_notes, cc0_setter,
13070 						cc0_setter, NULL_RTX,
13071 						NULL_RTX, NULL_RTX);
13072 			      distribute_links (LOG_LINKS (cc0_setter));
13073 
13074 			      SET_INSN_DELETED (cc0_setter);
13075 			      if (cc0_setter == i2)
13076 				i2 = NULL_RTX;
13077 			    }
13078 #endif
13079 			}
13080 		      else
13081 			{
13082 			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
13083 
13084 			  /*  If there isn't already a REG_UNUSED note, put one
13085 			      here.  Do not place a REG_DEAD note, even if
13086 			      the register is also used here; that would not
13087 			      match the algorithm used in lifetime analysis
13088 			      and can cause the consistency check in the
13089 			      scheduler to fail.  */
13090 			  if (! find_regno_note (tem, REG_UNUSED,
13091 						 REGNO (XEXP (note, 0))))
13092 			    place = tem;
13093 			  break;
13094 			}
13095 		    }
13096 		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
13097 			   || (CALL_P (tem)
13098 			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
13099 		    {
13100 		      place = tem;
13101 
13102 		      /* If we are doing a 3->2 combination, and we have a
13103 			 register which formerly died in i3 and was not used
13104 			 by i2, which now no longer dies in i3 and is used in
13105 			 i2 but does not die in i2, and place is between i2
13106 			 and i3, then we may need to move a link from place to
13107 			 i2.  */
13108 		      if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
13109 			  && from_insn
13110 			  && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
13111 			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
13112 			{
13113 			  rtx links = LOG_LINKS (place);
13114 			  LOG_LINKS (place) = 0;
13115 			  distribute_links (links);
13116 			}
13117 		      break;
13118 		    }
13119 
13120 		  if (tem == BB_HEAD (bb))
13121 		    break;
13122 		}
13123 
13124 	    }
13125 
13126 	  /* If the register is set or already dead at PLACE, we needn't do
13127 	     anything with this note if it is still a REG_DEAD note.
13128 	     We check here if it is set at all, not if it is totally replaced,
13129 	     which is what `dead_or_set_p' checks, so also check for it being
13130 	     set partially.  */
13131 
13132 	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
13133 	    {
13134 	      unsigned int regno = REGNO (XEXP (note, 0));
13135 	      reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
13136 
13137 	      if (dead_or_set_p (place, XEXP (note, 0))
13138 		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
13139 		{
13140 		  /* Unless the register previously died in PLACE, clear
13141 		     last_death.  [I no longer understand why this is
13142 		     being done.] */
13143 		  if (rsp->last_death != place)
13144 		    rsp->last_death = 0;
13145 		  place = 0;
13146 		}
13147 	      else
13148 		rsp->last_death = place;
13149 
13150 	      /* If this is a death note for a hard reg that is occupying
13151 		 multiple registers, ensure that we are still using all
13152 		 parts of the object.  If we find a piece of the object
13153 		 that is unused, we must arrange for an appropriate REG_DEAD
13154 		 note to be added for it.  However, we can't just emit a USE
13155 		 and tag the note to it, since the register might actually
13156 		 be dead; so we recurse, and the recursive call then finds
13157 		 the previous insn that used this register.  */
13158 
13159 	      if (place && regno < FIRST_PSEUDO_REGISTER
13160 		  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
13161 		{
13162 		  unsigned int endregno = END_HARD_REGNO (XEXP (note, 0));
13163 		  int all_used = 1;
13164 		  unsigned int i;
13165 
13166 		  for (i = regno; i < endregno; i++)
13167 		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
13168 			 && ! find_regno_fusage (place, USE, i))
13169 			|| dead_or_set_regno_p (place, i))
13170 		      all_used = 0;
13171 
13172 		  if (! all_used)
13173 		    {
13174 		      /* Put only REG_DEAD notes for pieces that are
13175 			 not already dead or set.  */
13176 
13177 		      for (i = regno; i < endregno;
13178 			   i += hard_regno_nregs[i][reg_raw_mode[i]])
13179 			{
13180 			  rtx piece = regno_reg_rtx[i];
13181 			  basic_block bb = this_basic_block;
13182 
13183 			  if (! dead_or_set_p (place, piece)
13184 			      && ! reg_bitfield_target_p (piece,
13185 							  PATTERN (place)))
13186 			    {
13187 			      rtx new_note = alloc_reg_note (REG_DEAD, piece,
13188 							     NULL_RTX);
13189 
13190 			      distribute_notes (new_note, place, place,
13191 						NULL_RTX, NULL_RTX, NULL_RTX);
13192 			    }
13193 			  else if (! refers_to_regno_p (i, i + 1,
13194 							PATTERN (place), 0)
13195 				   && ! find_regno_fusage (place, USE, i))
13196 			    for (tem = PREV_INSN (place); ;
13197 				 tem = PREV_INSN (tem))
13198 			      {
13199 				if (!NONDEBUG_INSN_P (tem))
13200 				  {
13201 				    if (tem == BB_HEAD (bb))
13202 				    break;
13203 				    continue;
13204 				  }
13205 				if (dead_or_set_p (tem, piece)
13206 				    || reg_bitfield_target_p (piece,
13207 							      PATTERN (tem)))
13208 				  {
13209 				    add_reg_note (tem, REG_UNUSED, piece);
13210 				    break;
13211 				  }
13212 			      }
13213 
13214 			}
13215 
13216 		      place = 0;
13217 		    }
13218 		}
13219 	    }
13220 	  break;
13221 
13222 	default:
13223 	  /* Any other notes should not be present at this point in the
13224 	     compilation.  */
13225 	  gcc_unreachable ();
13226 	}
13227 
13228       if (place)
13229 	{
13230 	  XEXP (note, 1) = REG_NOTES (place);
13231 	  REG_NOTES (place) = note;
13232 	}
13233 
13234       if (place2)
13235 	add_reg_note (place2, REG_NOTE_KIND (note), XEXP (note, 0));
13236     }
13237 }
13238 
13239 /* Similarly to above, distribute the LOG_LINKS that used to be present on
13240    I3, I2, and I1 to new locations.  This is also called to add a link
13241    pointing at I3 when I3's destination is changed.  */
13242 
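/* Illustrative example: if the old link pointed from I3 back to I2
   and I2's destination reg 100 is, after the combination, first used
   by a later insn I4 in the same basic block, the link is re-attached
   to I4 so that it still runs from the first use of reg 100 back to
   the insn that sets it.  */
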
13243 static void
13244 distribute_links (rtx links)
13245 {
13246   rtx link, next_link;
13247 
13248   for (link = links; link; link = next_link)
13249     {
13250       rtx place = 0;
13251       rtx insn;
13252       rtx set, reg;
13253 
13254       next_link = XEXP (link, 1);
13255 
13256       /* If the insn that this link points to is a NOTE or isn't a single
13257 	 set, ignore it.  In the latter case, it isn't clear what we
13258 	 can do other than ignore the link, since we can't tell which
13259 	 register it was for.  Such links wouldn't be used by combine
13260 	 anyway.
13261 
13262 	 It is not possible for the destination of the target of the link to
13263 	 have been changed by combine.  The only way that could happen is if we
13264 	 replace I3, I2, and I1 by I3 and I2.  But in that case the
13265 	 destination of I2 also remains unchanged.  */
13266 
13267       if (NOTE_P (XEXP (link, 0))
13268 	  || (set = single_set (XEXP (link, 0))) == 0)
13269 	continue;
13270 
13271       reg = SET_DEST (set);
13272       while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
13273 	     || GET_CODE (reg) == STRICT_LOW_PART)
13274 	reg = XEXP (reg, 0);
13275 
13276       /* A LOG_LINK is defined as being placed on the first insn that uses
13277 	 a register and points to the insn that sets the register.  Start
13278 	 searching at the next insn after the target of the link and stop
13279 	 when we reach a set of the register or the end of the basic block.
13280 
13281 	 Note that this correctly handles the link that used to point from
13282 	 I3 to I2.  Also note that not much searching is typically done here
13283 	 since most links don't point very far away.  */
13284 
13285       for (insn = NEXT_INSN (XEXP (link, 0));
13286 	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
13287 		     || BB_HEAD (this_basic_block->next_bb) != insn));
13288 	   insn = NEXT_INSN (insn))
13289 	if (DEBUG_INSN_P (insn))
13290 	  continue;
13291 	else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
13292 	  {
13293 	    if (reg_referenced_p (reg, PATTERN (insn)))
13294 	      place = insn;
13295 	    break;
13296 	  }
13297 	else if (CALL_P (insn)
13298 		 && find_reg_fusage (insn, USE, reg))
13299 	  {
13300 	    place = insn;
13301 	    break;
13302 	  }
13303 	else if (INSN_P (insn) && reg_set_p (reg, insn))
13304 	  break;
13305 
13306       /* If we found a place to put the link, place it there unless there
13307 	 is already a link to the same insn as LINK at that point.  */
13308 
13309       if (place)
13310 	{
13311 	  rtx link2;
13312 
13313 	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
13314 	    if (XEXP (link2, 0) == XEXP (link, 0))
13315 	      break;
13316 
13317 	  if (link2 == 0)
13318 	    {
13319 	      XEXP (link, 1) = LOG_LINKS (place);
13320 	      LOG_LINKS (place) = link;
13321 
13322 	      /* Set added_links_insn to the earliest insn we added a
13323 		 link to.  */
13324 	      if (added_links_insn == 0
13325 		  || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
13326 		added_links_insn = place;
13327 	    }
13328 	}
13329     }
13330 }
13331 
13332 /* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
13333    Check whether the expression pointed to by LOC is a register or
13334    memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
13335    Otherwise return zero.  */
13336 
13337 static int
13338 unmentioned_reg_p_1 (rtx *loc, void *expr)
13339 {
13340   rtx x = *loc;
13341 
13342   if (x != NULL_RTX
13343       && (REG_P (x) || MEM_P (x))
13344       && ! reg_mentioned_p (x, (rtx) expr))
13345     return 1;
13346   return 0;
13347 }
13348 
13349 /* Check for any register or memory mentioned in EQUIV that is not
13350    mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
13351    of EXPR where some registers may have been replaced by constants.  */
13352 
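/* Illustrative example: with

     EXPR  = (plus:SI (reg:SI 100) (reg:SI 101))
     EQUIV = (plus:SI (reg:SI 100) (const_int 4))

   every register of EQUIV is mentioned in EXPR, so this returns
   false; had EQUIV used (reg:SI 102) instead, it would return
   true.  */
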
13353 static bool
13354 unmentioned_reg_p (rtx equiv, rtx expr)
13355 {
13356   return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
13357 }
13358 
13359 void
13360 dump_combine_stats (FILE *file)
13361 {
13362   fprintf
13363     (file,
13364      ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
13365      combine_attempts, combine_merges, combine_extras, combine_successes);
13366 }
13367 
13368 void
13369 dump_combine_total_stats (FILE *file)
13370 {
13371   fprintf
13372     (file,
13373      "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
13374      total_attempts, total_merges, total_extras, total_successes);
13375 }
13376 
13377 static bool
13378 gate_handle_combine (void)
13379 {
13380   return (optimize > 0);
13381 }
13382 
13383 /* Try combining insns through substitution.  */
13384 static unsigned int
13385 rest_of_handle_combine (void)
13386 {
13387   int rebuild_jump_labels_after_combine;
13388 
13389   df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
13390   df_note_add_problem ();
13391   df_analyze ();
13392 
13393   regstat_init_n_sets_and_refs ();
13394 
13395   rebuild_jump_labels_after_combine
13396     = combine_instructions (get_insns (), max_reg_num ());
13397 
13398   /* Combining insns may have turned an indirect jump into a
13399      direct jump.  Rebuild the JUMP_LABEL fields of jumping
13400      instructions.  */
13401   if (rebuild_jump_labels_after_combine)
13402     {
13403       timevar_push (TV_JUMP);
13404       rebuild_jump_labels (get_insns ());
13405       cleanup_cfg (0);
13406       timevar_pop (TV_JUMP);
13407     }
13408 
13409   regstat_free_n_sets_and_refs ();
13410   return 0;
13411 }
13412 
13413 struct rtl_opt_pass pass_combine =
13414 {
13415  {
13416   RTL_PASS,
13417   "combine",                            /* name */
13418   gate_handle_combine,                  /* gate */
13419   rest_of_handle_combine,               /* execute */
13420   NULL,                                 /* sub */
13421   NULL,                                 /* next */
13422   0,                                    /* static_pass_number */
13423   TV_COMBINE,                           /* tv_id */
13424   PROP_cfglayout,                       /* properties_required */
13425   0,                                    /* properties_provided */
13426   0,                                    /* properties_destroyed */
13427   0,                                    /* todo_flags_start */
13428   TODO_dump_func |
13429   TODO_df_finish | TODO_verify_rtl_sharing |
13430   TODO_ggc_collect,                     /* todo_flags_finish */
13431  }
13432 };
13433