1 /* Optimize by combining instructions for GNU compiler.
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA.  */
21 
22 /* This module is essentially the "combiner" phase of the U. of Arizona
23    Portable Optimizer, but redone to work on our list-structured
24    representation for RTL instead of their string representation.
25 
26    The LOG_LINKS of each insn identify the most recent assignment
27    to each REG used in the insn.  It is a list of previous insns,
28    each of which contains a SET for a REG that is used in this insn
29    and not used or set in between.  LOG_LINKs never cross basic blocks.
30    They were set up by the preceding pass (lifetime analysis).
31 
32    We try to combine each pair of insns joined by a logical link.
33    We also try to combine triples of insns A, B and C when
34    C has a link back to B and B has a link back to A.
35 
36    LOG_LINKS does not have links for uses of CC0.  They are not
37    needed, because the insn that sets CC0 is always immediately
38    before the insn that tests it.  So we always regard a branch
39    insn as having a logical link to the preceding insn.  The same is true
40    for an insn explicitly using CC0.
41 
42    We check (with use_crosses_set_p) to avoid combining in such a way
43    as to move a computation to a place where its value would be different.
44 
45    Combination is done by mathematically substituting the previous
46    insn(s) values for the regs they set into the expressions in
47    the later insns that refer to these regs.  If the result is a valid insn
48    for our target machine, according to the machine description,
49    we install it, delete the earlier insns, and update the data flow
50    information (LOG_LINKS and REG_NOTES) for what we did.
51 
52    There are a few exceptions where the dataflow information created by
53    flow.c isn't completely updated:
54 
55    - reg_live_length is not updated
56    - reg_n_refs is not adjusted in the rare case when a register is
57      no longer required in a computation
58    - there are extremely rare cases (see distribute_notes) when a
59      REG_DEAD note is lost
60    - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61      removed because there is no way to know which register it was
62      linking
63 
64    To simplify substitution, we combine only when the earlier insn(s)
65    consist of only a single assignment.  To simplify updating afterward,
66    we never combine when a subroutine call appears in the middle.
67 
68    Since we do not represent assignments to CC0 explicitly except when that
69    is all an insn does, there is no LOG_LINKS entry in an insn that uses
70    the condition code for the insn that set the condition code.
71    Fortunately, these two insns must be consecutive.
72    Therefore, every JUMP_INSN is taken to have an implicit logical link
73    to the preceding insn.  This is not quite right, since non-jumps can
74    also use the condition code; but in practice such insns would not
75    combine anyway.  */
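
/* As a purely illustrative sketch (the register numbers and the resulting
   pattern are hypothetical, not taken from any particular target), a
   typical two-insn combination looks like this.  Given

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (mem:SI (reg:SI 100)) (reg:SI 98))

   where the second insn's LOG_LINKS point back to the first as the most
   recent set of register 100, try_combine substitutes the first SET's
   source into the second insn, yielding

	(set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (reg:SI 98))

   The combined insn is installed only if it is recognized against the
   target machine description (see recog_for_combine); otherwise all the
   substitutions are undone.  */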
76 
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "tm.h"
81 #include "rtl.h"
82 #include "tree.h"
83 #include "tm_p.h"
84 #include "flags.h"
85 #include "regs.h"
86 #include "hard-reg-set.h"
87 #include "basic-block.h"
88 #include "insn-config.h"
89 #include "function.h"
90 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
91 #include "expr.h"
92 #include "insn-attr.h"
93 #include "recog.h"
94 #include "real.h"
95 #include "toplev.h"
96 #include "target.h"
97 #include "optabs.h"
98 #include "insn-codes.h"
99 #include "rtlhooks-def.h"
100 /* Include output.h for dump_file.  */
101 #include "output.h"
102 #include "params.h"
103 #include "timevar.h"
104 #include "tree-pass.h"
105 
106 /* Number of attempts to combine instructions in this function.  */
107 
108 static int combine_attempts;
109 
110 /* Number of attempts that got as far as substitution in this function.  */
111 
112 static int combine_merges;
113 
114 /* Number of instructions combined with added SETs in this function.  */
115 
116 static int combine_extras;
117 
118 /* Number of instructions combined in this function.  */
119 
120 static int combine_successes;
121 
122 /* Totals over entire compilation.  */
123 
124 static int total_attempts, total_merges, total_extras, total_successes;
125 
126 /* combine_instructions may try to replace the right hand side of the
127    second instruction with the value of an associated REG_EQUAL note
128    before throwing it at try_combine.  That is problematic when there
129    is a REG_DEAD note for a register used in the old right hand side
130    and can cause distribute_notes to do wrong things.  This is the
131    second instruction if it has been so modified, null otherwise.  */
132 
133 static rtx i2mod;
134 
135 /* When I2MOD is nonnull, this is a copy of the old right hand side.  */
136 
137 static rtx i2mod_old_rhs;
138 
139 /* When I2MOD is nonnull, this is a copy of the new right hand side.  */
140 
141 static rtx i2mod_new_rhs;
142 
143 /* Vector mapping INSN_UIDs to cuids.
144    The cuids are like uids but always increase monotonically.
145    Combine always uses cuids so that it can compare them.
146    But actually renumbering the uids, which we used to do,
147    proves to be a bad idea because it makes it hard to compare
148    the dumps produced by earlier passes with those from later passes.  */
149 
150 static int *uid_cuid;
151 static int max_uid_cuid;
152 
153 /* Get the cuid of an insn.  */
154 
155 #define INSN_CUID(INSN) \
156 (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
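
/* For example, ordering tests throughout this file compare cuids rather
   than uids, as in INSN_CUID (insn) < last_call_cuid in can_combine_p,
   which detects whether a proposed combination would cross a CALL_INSN.  */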
157 
158 /* Maximum register number, which is the size of the tables below.  */
159 
160 static unsigned int combine_max_regno;
161 
162 struct reg_stat {
163   /* Record last point of death of (hard or pseudo) register n.  */
164   rtx				last_death;
165 
166   /* Record last point of modification of (hard or pseudo) register n.  */
167   rtx				last_set;
168 
169   /* The next group of fields allows the recording of the last value assigned
170      to (hard or pseudo) register n.  We use this information to see if an
171      operation being processed is redundant given a prior operation performed
172      on the register.  For example, an `and' with a constant is redundant if
173      all the zero bits are already known to be turned off.
174 
175      We use an approach similar to that used by cse, but change it in the
176      following ways:
177 
178      (1) We do not want to reinitialize at each label.
179      (2) It is useful, but not critical, to know the actual value assigned
180 	 to a register.  Often just its form is helpful.
181 
182      Therefore, we maintain the following fields:
183 
184      last_set_value		the last value assigned
185      last_set_label		records the value of label_tick when the
186 				register was assigned
187      last_set_table_tick	records the value of label_tick when a
188 				value using the register is assigned
189      last_set_invalid		set to nonzero when it is not valid
190 				to use the value of this register in some
191 				register's value
192 
193      To understand the usage of these tables, it is important to understand
194      the distinction between the value in last_set_value being valid and
195      the register being validly contained in some other expression in the
196      table.
197 
198      (The next two parameters are out of date).
199 
200      reg_stat[i].last_set_value is valid if it is nonzero, and either
201      reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.
202 
203      Register I may validly appear in any expression returned for the value
204      of another register if reg_n_sets[i] is 1.  It may also appear in the
205      value for register J if reg_stat[j].last_set_invalid is zero, or
206      reg_stat[i].last_set_label < reg_stat[j].last_set_label.
207 
208      If an expression is found in the table containing a register which may
209      not validly appear in an expression, the register is replaced by
210      something that won't match, (clobber (const_int 0)).  */
211 
212   /* Record last value assigned to (hard or pseudo) register n.  */
213 
214   rtx				last_set_value;
215 
216   /* Record the value of label_tick when an expression involving register n
217      is placed in last_set_value.  */
218 
219   int				last_set_table_tick;
220 
221   /* Record the value of label_tick when the value for register n is placed in
222      last_set_value.  */
223 
224   int				last_set_label;
225 
226   /* These fields are maintained in parallel with last_set_value and are
227      used to store the mode in which the register was last set, the bits
228      that were known to be zero when it was last set, and the number of
229      sign bit copies it was known to have when it was last set.  */
230 
231   unsigned HOST_WIDE_INT	last_set_nonzero_bits;
232   char				last_set_sign_bit_copies;
233   ENUM_BITFIELD(machine_mode)	last_set_mode : 8;
234 
235   /* Set nonzero if references to register n in expressions should not be
236      used.  last_set_invalid is set nonzero when this register is being
237      assigned to and last_set_table_tick == label_tick.  */
238 
239   char				last_set_invalid;
240 
241   /* Some registers that are set more than once and used in more than one
242      basic block are nevertheless always set in similar ways.  For example,
243      a QImode register may be loaded from memory in two places on a machine
244      where byte loads zero extend.
245 
246      We record in the following fields if a register has some leading bits
247      that are always equal to the sign bit, and what we know about the
248      nonzero bits of a register, specifically which bits are known to be
249      zero.
250 
251      If an entry is zero, it means that we don't know anything special.  */
252 
253   unsigned char			sign_bit_copies;
254 
255   unsigned HOST_WIDE_INT	nonzero_bits;
256 
257   /* Record the value of the label_tick when the last truncation
258      happened.  The field truncated_to_mode is only valid if
259      truncation_label == label_tick.  */
260 
261   int				truncation_label;
262 
263   /* Record the last truncation seen for this register.  If truncation
264      is not a nop to this mode we might be able to save an explicit
265      truncation if we know that value already contains a truncated
266      value.  */
267 
268   ENUM_BITFIELD(machine_mode)	truncated_to_mode : 8;
269 };
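
/* A small illustration of how the nonzero_bits field is used (the register
   number is hypothetical): if pseudo 100 is only ever set by QImode loads
   on a machine where byte loads zero extend, reg_stat[100].nonzero_bits
   will be 0xff, so a later (and:SI (reg:SI 100) (const_int 255)) is known
   to be redundant and can be simplified to just (reg:SI 100).  */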
270 
271 static struct reg_stat *reg_stat;
272 
273 /* Record the cuid of the last insn that invalidated memory
274    (anything that writes memory, and subroutine calls, but not pushes).  */
275 
276 static int mem_last_set;
277 
278 /* Record the cuid of the last CALL_INSN
279    so we can tell whether a potential combination crosses any calls.  */
280 
281 static int last_call_cuid;
282 
283 /* When `subst' is called, this is the insn that is being modified
284    (by combining in a previous insn).  The PATTERN of this insn
285    is still the old pattern partially modified and it should not be
286    looked at, but this may be used to examine the successors of the insn
287    to judge whether a simplification is valid.  */
288 
289 static rtx subst_insn;
290 
291 /* This is the lowest CUID that `subst' is currently dealing with.
292    get_last_value will not return a value if the register was set at or
293    after this CUID.  If not for this mechanism, we could get confused if
294    I2 or I1 in try_combine were an insn that used the old value of a register
295    to obtain a new value.  In that case, we might erroneously get the
296    new value of the register when we wanted the old one.  */
297 
298 static int subst_low_cuid;
299 
300 /* This contains any hard registers that are used in newpat; reg_dead_at_p
301    must consider all these registers to be always live.  */
302 
303 static HARD_REG_SET newpat_used_regs;
304 
305 /* This is an insn to which a LOG_LINKS entry has been added.  If this
306    insn is earlier than I2 or I3, combine should rescan starting at
307    that location.  */
308 
309 static rtx added_links_insn;
310 
311 /* Basic block in which we are performing combines.  */
312 static basic_block this_basic_block;
313 
314 /* A bitmap indicating which blocks had registers go dead at entry.
315    After combine, we'll need to re-do global life analysis with
316    those blocks as starting points.  */
317 static sbitmap refresh_blocks;
318 
319 /* The following array records the insn_rtx_cost for every insn
320    in the instruction stream.  */
321 
322 static int *uid_insn_cost;
323 
324 /* Length of the currently allocated uid_insn_cost array.  */
325 
326 static int last_insn_cost;
327 
328 /* Incremented for each label.  */
329 
330 static int label_tick;
331 
332 /* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
333    largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */
334 
335 static enum machine_mode nonzero_bits_mode;
336 
337 /* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
338    be safely used.  It is zero while computing them and after combine has
339    completed.  Keeping it zero while they are being computed prevents
340    propagating values based on previously set values, which can be
341    incorrect if a variable is modified in a loop.  */
342 
343 static int nonzero_sign_valid;
344 
345 
346 /* Record one modification to rtl structure
347    to be undone by storing old_contents into *where.  */
348 
349 struct undo
350 {
351   struct undo *next;
352   enum { UNDO_RTX, UNDO_INT, UNDO_MODE } kind;
353   union { rtx r; int i; enum machine_mode m; } old_contents;
354   union { rtx *r; int *i; } where;
355 };
356 
357 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
358    num_undo says how many are currently recorded.
359 
360    other_insn is nonzero if we have modified some other insn in the process
361    of working on subst_insn.  It must be verified too.  */
362 
363 struct undobuf
364 {
365   struct undo *undos;
366   struct undo *frees;
367   rtx other_insn;
368 };
369 
370 static struct undobuf undobuf;
371 
372 /* Number of times the pseudo being substituted for
373    was found and replaced.  */
374 
375 static int n_occurrences;
376 
377 static rtx reg_nonzero_bits_for_combine (rtx, enum machine_mode, rtx,
378 					 enum machine_mode,
379 					 unsigned HOST_WIDE_INT,
380 					 unsigned HOST_WIDE_INT *);
381 static rtx reg_num_sign_bit_copies_for_combine (rtx, enum machine_mode, rtx,
382 						enum machine_mode,
383 						unsigned int, unsigned int *);
384 static void do_SUBST (rtx *, rtx);
385 static void do_SUBST_INT (int *, int);
386 static void init_reg_last (void);
387 static void setup_incoming_promotions (void);
388 static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *);
389 static int cant_combine_insn_p (rtx);
390 static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
391 static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
392 static int contains_muldiv (rtx);
393 static rtx try_combine (rtx, rtx, rtx, int *);
394 static void undo_all (void);
395 static void undo_commit (void);
396 static rtx *find_split_point (rtx *, rtx);
397 static rtx subst (rtx, rtx, rtx, int, int);
398 static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
399 static rtx simplify_if_then_else (rtx);
400 static rtx simplify_set (rtx);
401 static rtx simplify_logical (rtx);
402 static rtx expand_compound_operation (rtx);
403 static rtx expand_field_assignment (rtx);
404 static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
405 			    rtx, unsigned HOST_WIDE_INT, int, int, int);
406 static rtx extract_left_shift (rtx, int);
407 static rtx make_compound_operation (rtx, enum rtx_code);
408 static int get_pos_from_mask (unsigned HOST_WIDE_INT,
409 			      unsigned HOST_WIDE_INT *);
410 static rtx canon_reg_for_combine (rtx, rtx);
411 static rtx force_to_mode (rtx, enum machine_mode,
412 			  unsigned HOST_WIDE_INT, int);
413 static rtx if_then_else_cond (rtx, rtx *, rtx *);
414 static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
415 static int rtx_equal_for_field_assignment_p (rtx, rtx);
416 static rtx make_field_assignment (rtx);
417 static rtx apply_distributive_law (rtx);
418 static rtx distribute_and_simplify_rtx (rtx, int);
419 static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
420 				     unsigned HOST_WIDE_INT);
421 static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
422 				   unsigned HOST_WIDE_INT);
423 static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
424 			    HOST_WIDE_INT, enum machine_mode, int *);
425 static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
426 static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
427 				 int);
428 static int recog_for_combine (rtx *, rtx, rtx *);
429 static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
430 static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
431 static void update_table_tick (rtx);
432 static void record_value_for_reg (rtx, rtx, rtx);
433 static void check_conversions (rtx, rtx);
434 static void record_dead_and_set_regs_1 (rtx, rtx, void *);
435 static void record_dead_and_set_regs (rtx);
436 static int get_last_value_validate (rtx *, rtx, int, int);
437 static rtx get_last_value (rtx);
438 static int use_crosses_set_p (rtx, int);
439 static void reg_dead_at_p_1 (rtx, rtx, void *);
440 static int reg_dead_at_p (rtx, rtx);
441 static void move_deaths (rtx, rtx, int, rtx, rtx *);
442 static int reg_bitfield_target_p (rtx, rtx);
443 static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
444 static void distribute_links (rtx);
445 static void mark_used_regs_combine (rtx);
446 static int insn_cuid (rtx);
447 static void record_promoted_value (rtx, rtx);
448 static int unmentioned_reg_p_1 (rtx *, void *);
449 static bool unmentioned_reg_p (rtx, rtx);
450 static void record_truncated_value (rtx);
451 static bool reg_truncated_to_mode (enum machine_mode, rtx);
452 static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);
453 
454 
455 /* It is not safe to use ordinary gen_lowpart in combine.
456    See comments in gen_lowpart_for_combine.  */
457 #undef RTL_HOOKS_GEN_LOWPART
458 #define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine
459 
460 /* Our implementation of gen_lowpart never emits a new pseudo.  */
461 #undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
462 #define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine
463 
464 #undef RTL_HOOKS_REG_NONZERO_REG_BITS
465 #define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine
466 
467 #undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
468 #define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine
469 
470 #undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
471 #define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode
472 
473 static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
474 
475 
476 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
477    insn.  The substitution can be undone by undo_all.  If INTO is already
478    set to NEWVAL, do not record this change.  Because computing NEWVAL might
479    also call SUBST, we have to compute it before we put anything into
480    the undo table.  */
481 
482 static void
483 do_SUBST (rtx *into, rtx newval)
484 {
485   struct undo *buf;
486   rtx oldval = *into;
487 
488   if (oldval == newval)
489     return;
490 
491   /* We'd like to catch as many invalid transformations here as
492      possible.  Unfortunately, there are way too many mode changes
493      that are perfectly valid, so we'd waste too much effort for
494      little gain doing the checks here.  Focus on catching invalid
495      transformations involving integer constants.  */
496   if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
497       && GET_CODE (newval) == CONST_INT)
498     {
499       /* Sanity check that we're replacing oldval with a CONST_INT
500 	 that is a valid sign-extension for the original mode.  */
501       gcc_assert (INTVAL (newval)
502 		  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));
503 
504       /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
505 	 CONST_INT is not valid, because after the replacement, the
506 	 original mode would be gone.  Unfortunately, we can't tell
507 	 when do_SUBST is called to replace the operand thereof, so we
508 	 perform this test on oldval instead, checking whether an
509 	 invalid replacement took place before we got here.  */
510       gcc_assert (!(GET_CODE (oldval) == SUBREG
511 		    && GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
512       gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
513 		    && GET_CODE (XEXP (oldval, 0)) == CONST_INT));
514     }
515 
516   if (undobuf.frees)
517     buf = undobuf.frees, undobuf.frees = buf->next;
518   else
519     buf = XNEW (struct undo);
520 
521   buf->kind = UNDO_RTX;
522   buf->where.r = into;
523   buf->old_contents.r = oldval;
524   *into = newval;
525 
526   buf->next = undobuf.undos, undobuf.undos = buf;
527 }
528 
529 #define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
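
/* A minimal usage sketch (the names set, new_src, newpat, i3 and new_notes
   here are hypothetical placeholders): every replacement made through SUBST
   is recorded in undobuf, so a candidate combination that fails to match
   can be rolled back as a group:

	SUBST (SET_SRC (set), new_src);
	...
	if (recog_for_combine (&newpat, i3, &new_notes) < 0)
	  undo_all ();

   undo_all restores every location recorded in undobuf; undo_commit
   accepts the changes instead.  */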
530 
531 /* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
532    for the value of a HOST_WIDE_INT value (including CONST_INT) is
533    not safe.  */
534 
535 static void
536 do_SUBST_INT (int *into, int newval)
537 {
538   struct undo *buf;
539   int oldval = *into;
540 
541   if (oldval == newval)
542     return;
543 
544   if (undobuf.frees)
545     buf = undobuf.frees, undobuf.frees = buf->next;
546   else
547     buf = XNEW (struct undo);
548 
549   buf->kind = UNDO_INT;
550   buf->where.i = into;
551   buf->old_contents.i = oldval;
552   *into = newval;
553 
554   buf->next = undobuf.undos, undobuf.undos = buf;
555 }
556 
557 #define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
558 
559 /* Similar to SUBST, but just substitute the mode.  This is used when
560    changing the mode of a pseudo-register, so that any other
561    references to the entry in the regno_reg_rtx array will change as
562    well.  */
563 
564 static void
565 do_SUBST_MODE (rtx *into, enum machine_mode newval)
566 {
567   struct undo *buf;
568   enum machine_mode oldval = GET_MODE (*into);
569 
570   if (oldval == newval)
571     return;
572 
573   if (undobuf.frees)
574     buf = undobuf.frees, undobuf.frees = buf->next;
575   else
576     buf = XNEW (struct undo);
577 
578   buf->kind = UNDO_MODE;
579   buf->where.r = into;
580   buf->old_contents.m = oldval;
581   PUT_MODE (*into, newval);
582 
583   buf->next = undobuf.undos, undobuf.undos = buf;
584 }
585 
586 #define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE(&(INTO), (NEWVAL))
587 
588 /* Subroutine of try_combine.  Determine whether the combine replacement
589    patterns NEWPAT and NEWI2PAT are cheaper according to insn_rtx_cost
590    than the original instruction sequence I1, I2 and I3.  Note that I1
591    and/or NEWI2PAT may be NULL_RTX.  This function returns false if the
592    costs of all instructions can be estimated and the replacements are
593    more expensive than the original sequence.  */
594 
595 static bool
596 combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat)
597 {
598   int i1_cost, i2_cost, i3_cost;
599   int new_i2_cost, new_i3_cost;
600   int old_cost, new_cost;
601 
602   /* Lookup the original insn_rtx_costs.  */
603   i2_cost = INSN_UID (i2) <= last_insn_cost
604 	    ? uid_insn_cost[INSN_UID (i2)] : 0;
605   i3_cost = INSN_UID (i3) <= last_insn_cost
606 	    ? uid_insn_cost[INSN_UID (i3)] : 0;
607 
608   if (i1)
609     {
610       i1_cost = INSN_UID (i1) <= last_insn_cost
611 		? uid_insn_cost[INSN_UID (i1)] : 0;
612       old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
613 		 ? i1_cost + i2_cost + i3_cost : 0;
614     }
615   else
616     {
617       old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
618       i1_cost = 0;
619     }
620 
621   /* Calculate the replacement insn_rtx_costs.  */
622   new_i3_cost = insn_rtx_cost (newpat);
623   if (newi2pat)
624     {
625       new_i2_cost = insn_rtx_cost (newi2pat);
626       new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
627 		 ? new_i2_cost + new_i3_cost : 0;
628     }
629   else
630     {
631       new_cost = new_i3_cost;
632       new_i2_cost = 0;
633     }
634 
635   if (undobuf.other_insn)
636     {
637       int old_other_cost, new_other_cost;
638 
639       old_other_cost = (INSN_UID (undobuf.other_insn) <= last_insn_cost
640 			? uid_insn_cost[INSN_UID (undobuf.other_insn)] : 0);
641       new_other_cost = insn_rtx_cost (PATTERN (undobuf.other_insn));
642       if (old_other_cost > 0 && new_other_cost > 0)
643 	{
644 	  old_cost += old_other_cost;
645 	  new_cost += new_other_cost;
646 	}
647       else
648 	old_cost = 0;
649     }
650 
651   /* Disallow this recombination if both new_cost and old_cost are
652      greater than zero, and new_cost is greater than old_cost.  */
653   if (old_cost > 0
654       && new_cost > old_cost)
655     {
656       if (dump_file)
657 	{
658 	  if (i1)
659 	    {
660 	      fprintf (dump_file,
661 		       "rejecting combination of insns %d, %d and %d\n",
662 		       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
663 	      fprintf (dump_file, "original costs %d + %d + %d = %d\n",
664 		       i1_cost, i2_cost, i3_cost, old_cost);
665 	    }
666 	  else
667 	    {
668 	      fprintf (dump_file,
669 		       "rejecting combination of insns %d and %d\n",
670 		       INSN_UID (i2), INSN_UID (i3));
671 	      fprintf (dump_file, "original costs %d + %d = %d\n",
672 		       i2_cost, i3_cost, old_cost);
673 	    }
674 
675 	  if (newi2pat)
676 	    {
677 	      fprintf (dump_file, "replacement costs %d + %d = %d\n",
678 		       new_i2_cost, new_i3_cost, new_cost);
679 	    }
680 	  else
681 	    fprintf (dump_file, "replacement cost %d\n", new_cost);
682 	}
683 
684       return false;
685     }
686 
687   /* Update the uid_insn_cost array with the replacement costs.  */
688   uid_insn_cost[INSN_UID (i2)] = new_i2_cost;
689   uid_insn_cost[INSN_UID (i3)] = new_i3_cost;
690   if (i1)
691     uid_insn_cost[INSN_UID (i1)] = 0;
692 
693   return true;
694 }
695 
696 /* Main entry point for combiner.  F is the first insn of the function.
697    NREGS is the first unused pseudo-reg number.
698 
699    Return nonzero if the combiner has turned an indirect jump
700    instruction into a direct jump.  */
701 static int
702 combine_instructions (rtx f, unsigned int nregs)
703 {
704   rtx insn, next;
705 #ifdef HAVE_cc0
706   rtx prev;
707 #endif
708   int i;
709   unsigned int j = 0;
710   rtx links, nextlinks;
711   sbitmap_iterator sbi;
712 
713   int new_direct_jump_p = 0;
714 
715   combine_attempts = 0;
716   combine_merges = 0;
717   combine_extras = 0;
718   combine_successes = 0;
719 
720   combine_max_regno = nregs;
721 
722   rtl_hooks = combine_rtl_hooks;
723 
724   reg_stat = XCNEWVEC (struct reg_stat, nregs);
725 
726   init_recog_no_volatile ();
727 
728   /* Compute maximum uid value so uid_cuid can be allocated.  */
729 
730   for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
731     if (INSN_UID (insn) > i)
732       i = INSN_UID (insn);
733 
734   uid_cuid = XNEWVEC (int, i + 1);
735   max_uid_cuid = i;
736 
737   nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
738 
739   /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
740      problems when, for example, we have j <<= 1 in a loop.  */
741 
742   nonzero_sign_valid = 0;
743 
744   /* Compute the mapping from uids to cuids.
745      Cuids are numbers assigned to insns, like uids,
746      except that cuids increase monotonically through the code.
747 
748      Scan all SETs and see if we can deduce anything about what
749      bits are known to be zero for some registers and how many copies
750      of the sign bit are known to exist for those registers.
751 
752      Also set any known values so that we can use it while searching
753      for what bits are known to be set.  */
754 
755   label_tick = 1;
756 
757   setup_incoming_promotions ();
758 
759   refresh_blocks = sbitmap_alloc (last_basic_block);
760   sbitmap_zero (refresh_blocks);
761 
762   /* Allocate array of current insn_rtx_costs.  */
763   uid_insn_cost = XCNEWVEC (int, max_uid_cuid + 1);
764   last_insn_cost = max_uid_cuid;
765 
766   for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
767     {
768       uid_cuid[INSN_UID (insn)] = ++i;
769       subst_low_cuid = i;
770       subst_insn = insn;
771 
772       if (INSN_P (insn))
773 	{
774 	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
775 		       NULL);
776 	  record_dead_and_set_regs (insn);
777 
778 #ifdef AUTO_INC_DEC
779 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
780 	    if (REG_NOTE_KIND (links) == REG_INC)
781 	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
782 						NULL);
783 #endif
784 
785 	  /* Record the current insn_rtx_cost of this instruction.  */
786 	  if (NONJUMP_INSN_P (insn))
787 	    uid_insn_cost[INSN_UID (insn)] = insn_rtx_cost (PATTERN (insn));
788 	  if (dump_file)
789 	    fprintf(dump_file, "insn_cost %d: %d\n",
790 		    INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]);
791 	}
792 
793       if (LABEL_P (insn))
794 	label_tick++;
795     }
796 
797   nonzero_sign_valid = 1;
798 
799   /* Now scan all the insns in forward order.  */
800 
801   label_tick = 1;
802   last_call_cuid = 0;
803   mem_last_set = 0;
804   init_reg_last ();
805   setup_incoming_promotions ();
806 
807   FOR_EACH_BB (this_basic_block)
808     {
809       for (insn = BB_HEAD (this_basic_block);
810 	   insn != NEXT_INSN (BB_END (this_basic_block));
811 	   insn = next ? next : NEXT_INSN (insn))
812 	{
813 	  next = 0;
814 
815 	  if (LABEL_P (insn))
816 	    label_tick++;
817 
818 	  else if (INSN_P (insn))
819 	    {
820 	      /* See if we know about function return values before this
821 		 insn based upon SUBREG flags.  */
822 	      check_conversions (insn, PATTERN (insn));
823 
824 	      /* Try this insn with each insn it links back to.  */
825 
826 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
827 		if ((next = try_combine (insn, XEXP (links, 0),
828 					 NULL_RTX, &new_direct_jump_p)) != 0)
829 		  goto retry;
830 
831 	      /* Try each sequence of three linked insns ending with this one.  */
832 
833 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
834 		{
835 		  rtx link = XEXP (links, 0);
836 
837 		  /* If the linked insn has been replaced by a note, then there
838 		     is no point in pursuing this chain any further.  */
839 		  if (NOTE_P (link))
840 		    continue;
841 
842 		  for (nextlinks = LOG_LINKS (link);
843 		       nextlinks;
844 		       nextlinks = XEXP (nextlinks, 1))
845 		    if ((next = try_combine (insn, link,
846 					     XEXP (nextlinks, 0),
847 					     &new_direct_jump_p)) != 0)
848 		      goto retry;
849 		}
850 
851 #ifdef HAVE_cc0
852 	      /* Try to combine a jump insn that uses CC0
853 		 with a preceding insn that sets CC0, and maybe with its
854 		 logical predecessor as well.
855 		 This is how we make decrement-and-branch insns.
856 		 We need this special code because data flow connections
857 		 via CC0 do not get entered in LOG_LINKS.  */
858 
859 	      if (JUMP_P (insn)
860 		  && (prev = prev_nonnote_insn (insn)) != 0
861 		  && NONJUMP_INSN_P (prev)
862 		  && sets_cc0_p (PATTERN (prev)))
863 		{
864 		  if ((next = try_combine (insn, prev,
865 					   NULL_RTX, &new_direct_jump_p)) != 0)
866 		    goto retry;
867 
868 		  for (nextlinks = LOG_LINKS (prev); nextlinks;
869 		       nextlinks = XEXP (nextlinks, 1))
870 		    if ((next = try_combine (insn, prev,
871 					     XEXP (nextlinks, 0),
872 					     &new_direct_jump_p)) != 0)
873 		      goto retry;
874 		}
875 
876 	      /* Do the same for an insn that explicitly references CC0.  */
877 	      if (NONJUMP_INSN_P (insn)
878 		  && (prev = prev_nonnote_insn (insn)) != 0
879 		  && NONJUMP_INSN_P (prev)
880 		  && sets_cc0_p (PATTERN (prev))
881 		  && GET_CODE (PATTERN (insn)) == SET
882 		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
883 		{
884 		  if ((next = try_combine (insn, prev,
885 					   NULL_RTX, &new_direct_jump_p)) != 0)
886 		    goto retry;
887 
888 		  for (nextlinks = LOG_LINKS (prev); nextlinks;
889 		       nextlinks = XEXP (nextlinks, 1))
890 		    if ((next = try_combine (insn, prev,
891 					     XEXP (nextlinks, 0),
892 					     &new_direct_jump_p)) != 0)
893 		      goto retry;
894 		}
895 
896 	      /* Finally, see if any of the insns that this insn links to
897 		 explicitly references CC0.  If so, try this insn, that insn,
898 		 and its predecessor if it sets CC0.  */
899 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
900 		if (NONJUMP_INSN_P (XEXP (links, 0))
901 		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
902 		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
903 		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
904 		    && NONJUMP_INSN_P (prev)
905 		    && sets_cc0_p (PATTERN (prev))
906 		    && (next = try_combine (insn, XEXP (links, 0),
907 					    prev, &new_direct_jump_p)) != 0)
908 		  goto retry;
909 #endif
910 
911 	      /* Try combining an insn with two different insns whose results it
912 		 uses.  */
913 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
914 		for (nextlinks = XEXP (links, 1); nextlinks;
915 		     nextlinks = XEXP (nextlinks, 1))
916 		  if ((next = try_combine (insn, XEXP (links, 0),
917 					   XEXP (nextlinks, 0),
918 					   &new_direct_jump_p)) != 0)
919 		    goto retry;
920 
921 	      /* Try this insn with each REG_EQUAL note it links back to.  */
922 	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
923 		{
924 		  rtx set, note;
925 		  rtx temp = XEXP (links, 0);
926 		  if ((set = single_set (temp)) != 0
927 		      && (note = find_reg_equal_equiv_note (temp)) != 0
928 		      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
929 		      /* Avoid using a register that may already have been
930 			 marked dead by an earlier instruction.  */
931 		      && ! unmentioned_reg_p (note, SET_SRC (set))
932 		      && (GET_MODE (note) == VOIDmode
933 			  ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
934 			  : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
935 		    {
936 		      /* Temporarily replace the set's source with the
937 			 contents of the REG_EQUAL note.  The insn will
938 			 be deleted or recognized by try_combine.  */
939 		      rtx orig = SET_SRC (set);
940 		      SET_SRC (set) = note;
941 		      i2mod = temp;
942 		      i2mod_old_rhs = copy_rtx (orig);
943 		      i2mod_new_rhs = copy_rtx (note);
944 		      next = try_combine (insn, i2mod, NULL_RTX,
945 					  &new_direct_jump_p);
946 		      i2mod = NULL_RTX;
947 		      if (next)
948 			goto retry;
949 		      SET_SRC (set) = orig;
950 		    }
951 		}
952 
953 	      if (!NOTE_P (insn))
954 		record_dead_and_set_regs (insn);
955 
956 	    retry:
957 	      ;
958 	    }
959 	}
960     }
961   clear_bb_flags ();
962 
963   EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, j, sbi)
964     BASIC_BLOCK (j)->flags |= BB_DIRTY;
965   new_direct_jump_p |= purge_all_dead_edges ();
966   delete_noop_moves ();
967 
968   update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
969 				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
970 				    | PROP_KILL_DEAD_CODE);
971 
972   /* Clean up.  */
973   sbitmap_free (refresh_blocks);
974   free (uid_insn_cost);
975   free (reg_stat);
976   free (uid_cuid);
977 
978   {
979     struct undo *undo, *next;
980     for (undo = undobuf.frees; undo; undo = next)
981       {
982 	next = undo->next;
983 	free (undo);
984       }
985     undobuf.frees = 0;
986   }
987 
988   total_attempts += combine_attempts;
989   total_merges += combine_merges;
990   total_extras += combine_extras;
991   total_successes += combine_successes;
992 
993   nonzero_sign_valid = 0;
994   rtl_hooks = general_rtl_hooks;
995 
996   /* Make recognizer allow volatile MEMs again.  */
997   init_recog ();
998 
999   return new_direct_jump_p;
1000 }
1001 
1002 /* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */
1003 
1004 static void
1005 init_reg_last (void)
1006 {
1007   unsigned int i;
1008   for (i = 0; i < combine_max_regno; i++)
1009     memset (reg_stat + i, 0, offsetof (struct reg_stat, sign_bit_copies));
1010 }
1011 
1012 /* Set up any promoted values for incoming argument registers.  */
1013 
1014 static void
1015 setup_incoming_promotions (void)
1016 {
1017   unsigned int regno;
1018   rtx reg;
1019   enum machine_mode mode;
1020   int unsignedp;
1021   rtx first = get_insns ();
1022 
1023   if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
1024     {
1025       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1026 	/* Check whether this register can hold an incoming pointer
1027 	   argument.  FUNCTION_ARG_REGNO_P tests outgoing register
1028 	   numbers, so translate if necessary due to register windows.  */
1029 	if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
1030 	    && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
1031 	  {
1032 	    record_value_for_reg
1033 	      (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
1034 					   : SIGN_EXTEND),
1035 					  GET_MODE (reg),
1036 					  gen_rtx_CLOBBER (mode, const0_rtx)));
1037 	  }
1038     }
1039 }
1040 
1041 /* Called via note_stores.  If X is a pseudo that is narrower than
1042    HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
1043 
1044    If we are setting only a portion of X and we can't figure out what
1045    portion, assume all bits will be used since we don't know what will
1046    be happening.
1047 
1048    Similarly, set how many bits of X are known to be copies of the sign bit
1049    at all locations in the function.  This is the smallest number implied
1050    by any set of X.  */
1051 
1052 static void
1053 set_nonzero_bits_and_sign_copies (rtx x, rtx set,
1054 				  void *data ATTRIBUTE_UNUSED)
1055 {
1056   unsigned int num;
1057 
1058   if (REG_P (x)
1059       && REGNO (x) >= FIRST_PSEUDO_REGISTER
1060       /* If this register is undefined at the start of the file, we can't
1061 	 say what its contents were.  */
1062       && ! REGNO_REG_SET_P
1063 	 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start, REGNO (x))
1064       && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
1065     {
1066       if (set == 0 || GET_CODE (set) == CLOBBER)
1067 	{
1068 	  reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1069 	  reg_stat[REGNO (x)].sign_bit_copies = 1;
1070 	  return;
1071 	}
1072 
1073       /* If this is a complex assignment, see if we can convert it into a
1074 	 simple assignment.  */
1075       set = expand_field_assignment (set);
1076 
1077       /* If this is a simple assignment, or we have a paradoxical SUBREG,
1078 	 set what we know about X.  */
1079 
1080       if (SET_DEST (set) == x
1081 	  || (GET_CODE (SET_DEST (set)) == SUBREG
1082 	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
1083 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
1084 	      && SUBREG_REG (SET_DEST (set)) == x))
1085 	{
1086 	  rtx src = SET_SRC (set);
1087 
1088 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
1089 	  /* If X is narrower than a word and SRC is a non-negative
1090 	     constant that would appear negative in the mode of X,
1091 	     sign-extend it for use in reg_stat[].nonzero_bits because some
1092 	     machines (maybe most) will actually do the sign-extension
1093 	     and this is the conservative approach.
1094 
1095 	     ??? For 2.5, try to tighten up the MD files in this regard
1096 	     instead of this kludge.  */
1097 
1098 	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
1099 	      && GET_CODE (src) == CONST_INT
1100 	      && INTVAL (src) > 0
1101 	      && 0 != (INTVAL (src)
1102 		       & ((HOST_WIDE_INT) 1
1103 			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
1104 	    src = GEN_INT (INTVAL (src)
1105 			   | ((HOST_WIDE_INT) (-1)
1106 			      << GET_MODE_BITSIZE (GET_MODE (x))));
1107 #endif
1108 
1109 	  /* Don't call nonzero_bits if it cannot change anything.  */
1110 	  if (reg_stat[REGNO (x)].nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
1111 	    reg_stat[REGNO (x)].nonzero_bits
1112 	      |= nonzero_bits (src, nonzero_bits_mode);
1113 	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
1114 	  if (reg_stat[REGNO (x)].sign_bit_copies == 0
1115 	      || reg_stat[REGNO (x)].sign_bit_copies > num)
1116 	    reg_stat[REGNO (x)].sign_bit_copies = num;
1117 	}
1118       else
1119 	{
1120 	  reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1121 	  reg_stat[REGNO (x)].sign_bit_copies = 1;
1122 	}
1123     }
1124 }
1125 
1126 /* See if INSN can be combined into I3.  PRED and SUCC are optionally
1127    insns that were previously combined into I3 or that will be combined
1128    into the merger of INSN and I3.
1129 
1130    Return 0 if the combination is not allowed for any reason.
1131 
1132    If the combination is allowed, *PDEST will be set to the single
1133    destination of INSN and *PSRC to the single source, and this function
1134    will return 1.  */
1135 
1136 static int
1137 can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
1138 	       rtx *pdest, rtx *psrc)
1139 {
1140   int i;
1141   rtx set = 0, src, dest;
1142   rtx p;
1143 #ifdef AUTO_INC_DEC
1144   rtx link;
1145 #endif
1146   int all_adjacent = (succ ? (next_active_insn (insn) == succ
1147 			      && next_active_insn (succ) == i3)
1148 		      : next_active_insn (insn) == i3);
1149 
1150   /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
1151      or a PARALLEL consisting of such a SET and CLOBBERs.
1152 
1153      If INSN has CLOBBER parallel parts, ignore them for our processing.
1154      By definition, these happen during the execution of the insn.  When it
1155      is merged with another insn, all bets are off.  If they are, in fact,
1156      needed and aren't also supplied in I3, they may be added by
1157      recog_for_combine.  Otherwise, it won't match.
1158 
1159      We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
1160      note.
1161 
1162      Get the source and destination of INSN.  If more than one, can't
1163      combine.  */
1164 
1165   if (GET_CODE (PATTERN (insn)) == SET)
1166     set = PATTERN (insn);
1167   else if (GET_CODE (PATTERN (insn)) == PARALLEL
1168 	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1169     {
1170       for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1171 	{
1172 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
1173 	  rtx note;
1174 
1175 	  switch (GET_CODE (elt))
1176 	    {
1177 	    /* This is important to combine floating point insns
1178 	       for the SH4 port.  */
1179 	    case USE:
1180 	      /* Combining an isolated USE doesn't make sense.
1181 		 We depend here on combinable_i3pat to reject them.  */
1182 	      /* The code below this loop only verifies that the inputs of
1183 		 the SET in INSN do not change.  We call reg_set_between_p
1184 		 to verify that the REG in the USE does not change between
1185 		 I3 and INSN.
1186 		 If the USE in INSN was for a pseudo register, the matching
1187 		 insn pattern will likely match any register; combining this
1188 		 with any other USE would only be safe if we knew that the
1189 		 used registers have identical values, or if there was
1190 		 something to tell them apart, e.g. different modes.  For
1191 		 now, we forgo such complicated tests and simply disallow
1192 		 combining of USES of pseudo registers with any other USE.  */
1193 	      if (REG_P (XEXP (elt, 0))
1194 		  && GET_CODE (PATTERN (i3)) == PARALLEL)
1195 		{
1196 		  rtx i3pat = PATTERN (i3);
1197 		  int i = XVECLEN (i3pat, 0) - 1;
1198 		  unsigned int regno = REGNO (XEXP (elt, 0));
1199 
1200 		  do
1201 		    {
1202 		      rtx i3elt = XVECEXP (i3pat, 0, i);
1203 
1204 		      if (GET_CODE (i3elt) == USE
1205 			  && REG_P (XEXP (i3elt, 0))
1206 			  && (REGNO (XEXP (i3elt, 0)) == regno
1207 			      ? reg_set_between_p (XEXP (elt, 0),
1208 						   PREV_INSN (insn), i3)
1209 			      : regno >= FIRST_PSEUDO_REGISTER))
1210 			return 0;
1211 		    }
1212 		  while (--i >= 0);
1213 		}
1214 	      break;
1215 
1216 	      /* We can ignore CLOBBERs.  */
1217 	    case CLOBBER:
1218 	      break;
1219 
1220 	    case SET:
1221 	      /* Ignore SETs whose result isn't used but not those that
1222 		 have side-effects.  */
1223 	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
1224 		  && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
1225 		      || INTVAL (XEXP (note, 0)) <= 0)
1226 		  && ! side_effects_p (elt))
1227 		break;
1228 
1229 	      /* If we have already found a SET, this is a second one and
1230 		 so we cannot combine with this insn.  */
1231 	      if (set)
1232 		return 0;
1233 
1234 	      set = elt;
1235 	      break;
1236 
1237 	    default:
1238 	      /* Anything else means we can't combine.  */
1239 	      return 0;
1240 	    }
1241 	}
1242 
1243       if (set == 0
1244 	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1245 	     so don't do anything with it.  */
1246 	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1247 	return 0;
1248     }
1249   else
1250     return 0;
1251 
1252   if (set == 0)
1253     return 0;
1254 
1255   set = expand_field_assignment (set);
1256   src = SET_SRC (set), dest = SET_DEST (set);
1257 
1258   /* Don't eliminate a store in the stack pointer.  */
1259   if (dest == stack_pointer_rtx
1260       /* Don't combine with an insn that sets a register to itself if it has
1261 	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
1262       || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1263       /* Can't merge an ASM_OPERANDS.  */
1264       || GET_CODE (src) == ASM_OPERANDS
1265       /* Can't merge a function call.  */
1266       || GET_CODE (src) == CALL
1267       /* Don't eliminate a function call argument.  */
1268       || (CALL_P (i3)
1269 	  && (find_reg_fusage (i3, USE, dest)
1270 	      || (REG_P (dest)
1271 		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
1272 		  && global_regs[REGNO (dest)])))
1273       /* Don't substitute into an incremented register.  */
1274       || FIND_REG_INC_NOTE (i3, dest)
1275       || (succ && FIND_REG_INC_NOTE (succ, dest))
1276       /* Don't substitute into a non-local goto, this confuses CFG.  */
1277       || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
1278 #if 0
1279       /* Don't combine the end of a libcall into anything.  */
1280       /* ??? This gives worse code, and appears to be unnecessary, since no
1281 	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
1282 	 use REG_RETVAL notes for noconflict blocks, but other code here
1283 	 makes sure that those insns don't disappear.  */
1284       || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1285 #endif
1286       /* Make sure that DEST is not used after SUCC but before I3.  */
1287       || (succ && ! all_adjacent
1288 	  && reg_used_between_p (dest, succ, i3))
1289       /* Make sure that the value that is to be substituted for the register
1290 	 does not use any registers whose values alter in between.  However,
1291 	 if the insns are adjacent, a use can't cross a set even though we
1292 	 think it might (this can happen for a sequence of insns each setting
1293 	 the same destination; last_set of that register might point to
1294 	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
1295 	 equivalent to the memory so the substitution is valid even if there
1296 	 are intervening stores.  Also, don't move a volatile asm or
1297 	 UNSPEC_VOLATILE across any other insns.  */
1298       || (! all_adjacent
1299 	  && (((!MEM_P (src)
1300 		|| ! find_reg_note (insn, REG_EQUIV, src))
1301 	       && use_crosses_set_p (src, INSN_CUID (insn)))
1302 	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1303 	      || GET_CODE (src) == UNSPEC_VOLATILE))
1304       /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
1305 	 better register allocation by not doing the combine.  */
1306       || find_reg_note (i3, REG_NO_CONFLICT, dest)
1307       || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
1308       /* Don't combine across a CALL_INSN, because that would possibly
1309 	 change whether the life span of some REGs crosses calls or not,
1310 	 and it is a pain to update that information.
1311 	 Exception: if source is a constant, moving it later can't hurt.
1312 	 Accept that special case, because it helps -fforce-addr a lot.  */
1313       || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
1314     return 0;
1315 
1316   /* DEST must either be a REG or CC0.  */
1317   if (REG_P (dest))
1318     {
1319       /* If register alignment is being enforced for multi-word items in all
1320 	 cases except for parameters, it is possible to have a register copy
1321 	 insn referencing a hard register that is not allowed to contain the
1322 	 mode being copied and which would not be valid as an operand of most
1323 	 insns.  Eliminate this problem by not combining with such an insn.
1324 
1325 	 Also, on some machines we don't want to extend the life of a hard
1326 	 register.  */
1327 
1328       if (REG_P (src)
1329 	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1330 	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1331 	      /* Don't extend the life of a hard register unless it is
1332 		 user variable (if we have few registers) or it can't
1333 		 fit into the desired register (meaning something special
1334 		 is going on).
1335 		 Also avoid substituting a return register into I3, because
1336 		 reload can't handle a conflict with constraints of other
1337 		 inputs.  */
1338 	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
1339 		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
1340 	return 0;
1341     }
1342   else if (GET_CODE (dest) != CC0)
1343     return 0;
1344 
1345 
1346   if (GET_CODE (PATTERN (i3)) == PARALLEL)
1347     for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1348       if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
1349 	{
1350 	  /* Don't substitute for a register intended as a clobberable
1351 	     operand.  */
1352 	  rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
1353 	  if (rtx_equal_p (reg, dest))
1354 	    return 0;
1355 
1356 	  /* If the clobber represents an earlyclobber operand, we must not
1357 	     substitute an expression containing the clobbered register.
1358 	     As we do not analyze the constraint strings here, we have to
1359 	     make the conservative assumption.  However, if the register is
1360 	     a fixed hard reg, the clobber cannot represent any operand;
1361 	     we leave it up to the machine description to either accept or
1362 	     reject use-and-clobber patterns.  */
1363 	  if (!REG_P (reg)
1364 	      || REGNO (reg) >= FIRST_PSEUDO_REGISTER
1365 	      || !fixed_regs[REGNO (reg)])
1366 	    if (reg_overlap_mentioned_p (reg, src))
1367 	      return 0;
1368 	}
1369 
1370   /* If INSN contains anything volatile, or is an `asm' (whether volatile
1371      or not), reject, unless nothing volatile comes between it and I3 */
1372 
1373   if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1374     {
1375       /* Make sure succ doesn't contain a volatile reference.  */
1376       if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1377 	return 0;
1378 
1379       for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1380 	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
1381 	  return 0;
1382     }
1383 
1384   /* If INSN is an asm, and DEST is a hard register, reject, since it has
1385      to be an explicit register variable, and was chosen for a reason.  */
1386 
1387   if (GET_CODE (src) == ASM_OPERANDS
1388       && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1389     return 0;
1390 
1391   /* If there are any volatile insns between INSN and I3, reject, because
1392      they might affect machine state.  */
1393 
1394   for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1395     if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
1396       return 0;
1397 
1398   /* If INSN contains an autoincrement or autodecrement, make sure that
1399      register is not used between there and I3, and not already used in
1400      I3 either.  Neither must it be used in PRED or SUCC, if they exist.
1401      Also insist that I3 not be a jump; if it were one
1402      and the incremented register were spilled, we would lose.  */
1403 
1404 #ifdef AUTO_INC_DEC
1405   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1406     if (REG_NOTE_KIND (link) == REG_INC
1407 	&& (JUMP_P (i3)
1408 	    || reg_used_between_p (XEXP (link, 0), insn, i3)
1409 	    || (pred != NULL_RTX
1410 		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
1411 	    || (succ != NULL_RTX
1412 		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
1413 	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1414       return 0;
1415 #endif
1416 
1417 #ifdef HAVE_cc0
1418   /* Don't combine an insn that follows a CC0-setting insn.
1419      An insn that uses CC0 must not be separated from the one that sets it.
1420      We do, however, allow I2 to follow a CC0-setting insn if that insn
1421      is passed as I1; in that case it will be deleted also.
1422      We also allow combining in this case if all the insns are adjacent
1423      because that would leave the two CC0 insns adjacent as well.
1424      It would be more logical to test whether CC0 occurs inside I1 or I2,
1425      but that would be much slower, and this ought to be equivalent.  */
1426 
1427   p = prev_nonnote_insn (insn);
1428   if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
1429       && ! all_adjacent)
1430     return 0;
1431 #endif
1432 
1433   /* If we get here, we have passed all the tests and the combination is
1434      to be allowed.  */
1435 
1436   *pdest = dest;
1437   *psrc = src;
1438 
1439   return 1;
1440 }
1441 
1442 /* LOC is the location within I3 that contains its pattern or the component
1443    of a PARALLEL of the pattern.  We validate that it is valid for combining.
1444 
1445    One problem is that if I3 modifies its output, as opposed to replacing it
1446    entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
1447    so would produce an insn that is not equivalent to the original insns.
1448 
1449    Consider:
1450 
1451 	 (set (reg:DI 101) (reg:DI 100))
1452 	 (set (subreg:SI (reg:DI 101) 0) <foo>)
1453 
1454    This is NOT equivalent to:
1455 
1456 	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1457 		    (set (reg:DI 101) (reg:DI 100))])
1458 
1459    Not only does this modify 100 (in which case it might still be valid
1460    if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1461 
1462    We can also run into a problem if I2 sets a register that I1
1463    uses and I1 gets directly substituted into I3 (not via I2).  In that
1464    case, we would be getting the wrong value of I2DEST into I3, so we
1465    must reject the combination.  This case occurs when I2 and I1 both
1466    feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1467    If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
1468    of a SET must prevent combination from occurring.
1469 
1470    Before doing the above check, we first try to expand a field assignment
1471    into a set of logical operations.
1472 
1473    If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
1474    we place a register that is both set and used within I3.  If more than one
1475    such register is detected, we fail.
1476 
1477    Return 1 if the combination is valid, zero otherwise.  */
1478 
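
/* As an illustration of the I1_NOT_IN_SRC case (hypothetical RTL, register
   numbers chosen only for the example):

	 I1:  (set (reg:SI 101) (minus:SI (reg:SI 100) (const_int 1)))
	 I2:  (set (reg:SI 100) (reg:SI 102))
	 I3:  (set (reg:SI 103) (plus:SI (reg:SI 100) (reg:SI 101)))

   Substituting I1's source directly into I3 would make the MINUS read the
   value of (reg:SI 100) written by I2 rather than the value I1 actually
   used, so such a combination has to be rejected.  */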
1479 static int
1480 combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
1481 		  int i1_not_in_src, rtx *pi3dest_killed)
1482 {
1483   rtx x = *loc;
1484 
1485   if (GET_CODE (x) == SET)
1486     {
1487       rtx set = x;
1488       rtx dest = SET_DEST (set);
1489       rtx src = SET_SRC (set);
1490       rtx inner_dest = dest;
1491       rtx subdest;
1492 
1493       while (GET_CODE (inner_dest) == STRICT_LOW_PART
1494 	     || GET_CODE (inner_dest) == SUBREG
1495 	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
1496 	inner_dest = XEXP (inner_dest, 0);
1497 
1498       /* Check for the case where I3 modifies its output, as discussed
1499 	 above.  We don't want to prevent pseudos from being combined
1500 	 into the address of a MEM, so only prevent the combination if
1501 	 i1 or i2 set the same MEM.  */
1502       if ((inner_dest != dest &&
1503 	   (!MEM_P (inner_dest)
1504 	    || rtx_equal_p (i2dest, inner_dest)
1505 	    || (i1dest && rtx_equal_p (i1dest, inner_dest)))
1506 	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
1507 	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1508 
1509 	  /* This is the same test done in can_combine_p except we can't test
1510 	     all_adjacent; we don't have to, since this instruction will stay
1511 	     in place, thus we are not considering increasing the lifetime of
1512 	     INNER_DEST.
1513 
1514 	     Also, if this insn sets a function argument, combining it with
1515 	     something that might need a spill could clobber a previous
1516 	     function argument; the all_adjacent test in can_combine_p also
1517 	     checks this; here, we do a more specific test for this case.  */
1518 
1519 	  || (REG_P (inner_dest)
1520 	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1521 	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1522 					GET_MODE (inner_dest))))
1523 	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1524 	return 0;
1525 
1526       /* If DEST is used in I3, it is being killed in this insn, so
1527 	 record that for later.  We have to consider paradoxical
1528 	 subregs here, since they kill the whole register, but we
1529 	 ignore partial subregs, STRICT_LOW_PART, etc.
1530 	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1531 	 STACK_POINTER_REGNUM, since these are always considered to be
1532 	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
1533       subdest = dest;
1534       if (GET_CODE (subdest) == SUBREG
1535 	  && (GET_MODE_SIZE (GET_MODE (subdest))
1536 	      >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
1537 	subdest = SUBREG_REG (subdest);
1538       if (pi3dest_killed
1539 	  && REG_P (subdest)
1540 	  && reg_referenced_p (subdest, PATTERN (i3))
1541 	  && REGNO (subdest) != FRAME_POINTER_REGNUM
1542 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1543 	  && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
1544 #endif
1545 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1546 	  && (REGNO (subdest) != ARG_POINTER_REGNUM
1547 	      || ! fixed_regs [REGNO (subdest)])
1548 #endif
1549 	  && REGNO (subdest) != STACK_POINTER_REGNUM)
1550 	{
1551 	  if (*pi3dest_killed)
1552 	    return 0;
1553 
1554 	  *pi3dest_killed = subdest;
1555 	}
1556     }
1557 
1558   else if (GET_CODE (x) == PARALLEL)
1559     {
1560       int i;
1561 
1562       for (i = 0; i < XVECLEN (x, 0); i++)
1563 	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1564 				i1_not_in_src, pi3dest_killed))
1565 	  return 0;
1566     }
1567 
1568   return 1;
1569 }
1570 
1571 /* Return 1 if X is an arithmetic expression that contains a multiplication
1572    or division.  We don't count multiplications by powers of two here.  */
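
/* For example (illustrative expressions only), (mult:SI (reg:SI 65)
   (reg:SI 66)) and (plus:SI (udiv:SI (reg:SI 65) (reg:SI 66)) (const_int 4))
   both count, while (mult:SI (reg:SI 65) (const_int 8)) does not, since a
   multiplication by a power of two is no more expensive than a shift.  */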
1573 
1574 static int
1575 contains_muldiv (rtx x)
1576 {
1577   switch (GET_CODE (x))
1578     {
1579     case MOD:  case DIV:  case UMOD:  case UDIV:
1580       return 1;
1581 
1582     case MULT:
1583       return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
1584 		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1585     default:
1586       if (BINARY_P (x))
1587 	return contains_muldiv (XEXP (x, 0))
1588 	    || contains_muldiv (XEXP (x, 1));
1589 
1590       if (UNARY_P (x))
1591 	return contains_muldiv (XEXP (x, 0));
1592 
1593       return 0;
1594     }
1595 }
1596 
1597 /* Determine whether INSN can be used in a combination.  Return nonzero if
1598    not.  This is used in try_combine to detect early some cases where we
1599    can't perform combinations.  */
1600 
1601 static int
1602 cant_combine_insn_p (rtx insn)
1603 {
1604   rtx set;
1605   rtx src, dest;
1606 
1607   /* If this isn't really an insn, we can't do anything.
1608      This can occur when flow deletes an insn that it has merged into an
1609      auto-increment address.  */
1610   if (! INSN_P (insn))
1611     return 1;
1612 
1613   /* Never combine loads and stores involving hard regs that are likely
1614      to be spilled.  The register allocator can usually handle such
1615      reg-reg moves by tying.  If we allow the combiner to make
1616      substitutions of likely-spilled regs, reload might die.
1617      As an exception, we allow combinations involving fixed regs; these are
1618      not available to the register allocator so there's no risk involved.  */
1619 
1620   set = single_set (insn);
1621   if (! set)
1622     return 0;
1623   src = SET_SRC (set);
1624   dest = SET_DEST (set);
1625   if (GET_CODE (src) == SUBREG)
1626     src = SUBREG_REG (src);
1627   if (GET_CODE (dest) == SUBREG)
1628     dest = SUBREG_REG (dest);
1629   if (REG_P (src) && REG_P (dest)
1630       && ((REGNO (src) < FIRST_PSEUDO_REGISTER
1631 	   && ! fixed_regs[REGNO (src)]
1632 	   && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
1633 	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
1634 	      && ! fixed_regs[REGNO (dest)]
1635 	      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
1636     return 1;
1637 
1638   return 0;
1639 }
1640 
1641 struct likely_spilled_retval_info
1642 {
1643   unsigned regno, nregs;
1644   unsigned mask;
1645 };
1646 
1647 /* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
1648    hard registers that are known to be written to / clobbered in full.  */
1649 static void
1650 likely_spilled_retval_1 (rtx x, rtx set, void *data)
1651 {
1652   struct likely_spilled_retval_info *info = data;
1653   unsigned regno, nregs;
1654   unsigned new_mask;
1655 
1656   if (!REG_P (XEXP (set, 0)))
1657     return;
1658   regno = REGNO (x);
1659   if (regno >= info->regno + info->nregs)
1660     return;
1661   nregs = hard_regno_nregs[regno][GET_MODE (x)];
1662   if (regno + nregs <= info->regno)
1663     return;
1664   new_mask = (2U << (nregs - 1)) - 1;
1665   if (regno < info->regno)
1666     new_mask >>= info->regno - regno;
1667   else
1668     new_mask <<= regno - info->regno;
1669   info->mask &= new_mask;
1670 }
1671 
1672 /* Return nonzero iff part of the return value is live during INSN, and
1673    it is likely spilled.  This can happen when more than one insn is needed
1674    to copy the return value, e.g. when we consider combining into the
1675    second copy insn for a complex value.  */
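
/* Sketch of the situation this guards against (hypothetical RTL; assume a
   target returning a DImode value in the word-sized hard registers 0 and 1):

	 (set (reg:SI 0) (reg:SI 70))	low part of the return value
	 (set (reg:SI 1) (reg:SI 71))	high part of the return value
	 (use (reg:DI 0))

   When combining into the second copy, hard register 0 already holds part
   of the return value; if its class is likely to be spilled, substituting
   into that insn could leave reload with an impossible job.  */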
1676 
1677 static int
1678 likely_spilled_retval_p (rtx insn)
1679 {
1680   rtx use = BB_END (this_basic_block);
1681   rtx reg, p;
1682   unsigned regno, nregs;
1683   /* We assume here that no machine mode needs more than
1684      32 hard registers when the value overlaps with a register
1685      for which FUNCTION_VALUE_REGNO_P is true.  */
1686   unsigned mask;
1687   struct likely_spilled_retval_info info;
1688 
1689   if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
1690     return 0;
1691   reg = XEXP (PATTERN (use), 0);
1692   if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
1693     return 0;
1694   regno = REGNO (reg);
1695   nregs = hard_regno_nregs[regno][GET_MODE (reg)];
1696   if (nregs == 1)
1697     return 0;
1698   mask = (2U << (nregs - 1)) - 1;
1699 
1700   /* Disregard parts of the return value that are set later.  */
1701   info.regno = regno;
1702   info.nregs = nregs;
1703   info.mask = mask;
1704   for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
1705     if (INSN_P (p))
           note_stores (PATTERN (p), likely_spilled_retval_1, &info);
1706   mask = info.mask;
1707 
1708   /* Check if any of the (probably) live return value registers is
1709      likely spilled.  */
1710   nregs --;
1711   do
1712     {
1713       if ((mask & 1 << nregs)
1714 	  && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
1715 	return 1;
1716     } while (nregs--);
1717   return 0;
1718 }
1719 
1720 /* Adjust INSN after we made a change to its destination.
1721 
1722    Changing the destination can invalidate notes that say something about
1723    the results of the insn and a LOG_LINK pointing to the insn.  */
1724 
1725 static void
1726 adjust_for_new_dest (rtx insn)
1727 {
1728   rtx *loc;
1729 
1730   /* For notes, be conservative and simply remove them.  */
1731   loc = &REG_NOTES (insn);
1732   while (*loc)
1733     {
1734       enum reg_note kind = REG_NOTE_KIND (*loc);
1735       if (kind == REG_EQUAL || kind == REG_EQUIV)
1736 	*loc = XEXP (*loc, 1);
1737       else
1738 	loc = &XEXP (*loc, 1);
1739     }
1740 
1741   /* The new insn will have a destination that was previously the destination
1742      of an insn just above it.  Call distribute_links to make a LOG_LINK from
1743      the next use of that destination.  */
1744   distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
1745 }
1746 
1747 /* Return TRUE if combine can reuse reg X in mode MODE.
1748    ADDED_SETS is nonzero if the original set is still required.  */
1749 static bool
1750 can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
1751 {
1752   unsigned int regno;
1753 
1754   if (!REG_P(x))
1755     return false;
1756 
1757   regno = REGNO (x);
1758   /* Allow hard registers if the new mode is legal, and occupies no more
1759      registers than the old mode.  */
1760   if (regno < FIRST_PSEUDO_REGISTER)
1761     return (HARD_REGNO_MODE_OK (regno, mode)
1762 	    && (hard_regno_nregs[regno][GET_MODE (x)]
1763 		>= hard_regno_nregs[regno][mode]));
1764 
1765   /* Or a pseudo that is only used once.  */
1766   return (REG_N_SETS (regno) == 1 && !added_sets
1767 	  && !REG_USERVAR_P (x));
1768 }
1769 
1770 
1771 /* Check whether X, the destination of a set, refers to part of
1772    the register specified by REG.  */
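
/* For instance (illustrative only), with REG being (reg:SI 70), both
   (subreg:HI (reg:SI 70) 0) and (strict_low_part (subreg:HI (reg:SI 70) 0))
   refer to part of the register, whereas (reg:SI 70) itself or a SUBREG of
   some other register does not.  */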
1773 
1774 static bool
1775 reg_subword_p (rtx x, rtx reg)
1776 {
1777   /* Check that reg is an integer mode register.  */
1778   if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
1779     return false;
1780 
1781   if (GET_CODE (x) == STRICT_LOW_PART
1782       || GET_CODE (x) == ZERO_EXTRACT)
1783     x = XEXP (x, 0);
1784 
1785   return GET_CODE (x) == SUBREG
1786 	 && SUBREG_REG (x) == reg
1787 	 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
1788 }
1789 
1790 
1791 /* Try to combine the insns I1 and I2 into I3.
1792    Here I1 and I2 appear earlier than I3.
1793    I1 can be zero; then we combine just I2 into I3.
1794 
1795    If we are combining three insns and the resulting insn is not recognized,
1796    try splitting it into two insns.  If that happens, I2 and I3 are retained
1797    and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
1798    are pseudo-deleted.
1799 
1800    Return 0 if the combination does not work.  Then nothing is changed.
1801    If we did the combination, return the insn at which combine should
1802    resume scanning.
1803 
1804    Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
1805    new direct jump instruction.  */
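
/* A minimal illustration of a successful two-insn combination (hypothetical
   RTL, register numbers arbitrary):

	 I2:  (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 4)))
	 I3:  (set (reg:SI 72) (mem:SI (reg:SI 70)))

   If (reg:SI 70) dies in I3 and the target has a base-plus-displacement
   addressing mode, substitution yields

	 I3:  (set (reg:SI 72) (mem:SI (plus:SI (reg:SI 71) (const_int 4))))

   and I2 is deleted.  */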
1806 
1807 static rtx
1808 try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
1809 {
1810   /* New patterns for I3 and I2, respectively.  */
1811   rtx newpat, newi2pat = 0;
1812   rtvec newpat_vec_with_clobbers = 0;
1813   int substed_i2 = 0, substed_i1 = 0;
1814   /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
1815   int added_sets_1, added_sets_2;
1816   /* Total number of SETs to put into I3.  */
1817   int total_sets;
1818   /* Nonzero if I2's body now appears in I3.  */
1819   int i2_is_used;
1820   /* INSN_CODEs for new I3, new I2, and user of condition code.  */
1821   int insn_code_number, i2_code_number = 0, other_code_number = 0;
1822   /* Contains I3 if the destination of I3 is used in its source, which means
1823      that the old life of I3 is being killed.  If that usage is placed into
1824      I2 and not in I3, a REG_DEAD note must be made.  */
1825   rtx i3dest_killed = 0;
1826   /* SET_DEST and SET_SRC of I2 and I1.  */
1827   rtx i2dest, i2src, i1dest = 0, i1src = 0;
1828   /* PATTERN (I1) and PATTERN (I2), or a copy of it in certain cases.  */
1829   rtx i1pat = 0, i2pat = 0;
1830   /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
1831   int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1832   int i2dest_killed = 0, i1dest_killed = 0;
1833   int i1_feeds_i3 = 0;
1834   /* Notes that must be added to REG_NOTES in I3 and I2.  */
1835   rtx new_i3_notes, new_i2_notes;
1836   /* Notes that we substituted I3 into I2 instead of the normal case.  */
1837   int i3_subst_into_i2 = 0;
1838   /* Notes that I1, I2 or I3 is a MULT operation.  */
1839   int have_mult = 0;
1840   int swap_i2i3 = 0;
1841 
1842   int maxreg;
1843   rtx temp;
1844   rtx link;
1845   int i;
1846 
1847   /* Exit early if one of the insns involved can't be used for
1848      combinations.  */
1849   if (cant_combine_insn_p (i3)
1850       || cant_combine_insn_p (i2)
1851       || (i1 && cant_combine_insn_p (i1))
1852       || likely_spilled_retval_p (i3)
1853       /* We also can't do anything if I3 has a
1854 	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1855 	 libcall.  */
1856 #if 0
1857       /* ??? This gives worse code, and appears to be unnecessary, since no
1858 	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
1859       || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1860 #endif
1861       )
1862     return 0;
1863 
1864   combine_attempts++;
1865   undobuf.other_insn = 0;
1866 
1867   /* Reset the hard register usage information.  */
1868   CLEAR_HARD_REG_SET (newpat_used_regs);
1869 
1870   /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
1871      code below, set I1 to be the earlier of the two insns.  */
1872   if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1873     temp = i1, i1 = i2, i2 = temp;
1874 
1875   added_links_insn = 0;
1876 
1877   /* First check for one important special-case that the code below will
1878      not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
1879      and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
1880      we may be able to replace that destination with the destination of I3.
1881      This occurs in the common code where we compute both a quotient and
1882      remainder into a structure, in which case we want to do the computation
1883      directly into the structure to avoid register-register copies.
1884 
1885      Note that this case handles both multiple sets in I2 and also
1886      cases where I2 has a number of CLOBBERs or PARALLELs.
1887 
1888      We make very conservative checks below and only try to handle the
1889      most common cases of this.  For example, we only handle the case
1890      where I2 and I3 are adjacent to avoid making difficult register
1891      usage tests.  */
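
  /* Sketch of the special case handled here (hypothetical RTL):

	 I2:  (parallel [(set (reg:SI 70) (div:SI (reg:SI 68) (reg:SI 69)))
			 (set (reg:SI 71) (mod:SI (reg:SI 68) (reg:SI 69)))])
	 I3:  (set (mem:SI (reg:SI 80)) (reg:SI 70))

     If (reg:SI 70) is a pseudo that dies in I3 and I2 and I3 are adjacent,
     the quotient can be computed directly into I3's destination and the
     separate copy in I3 goes away.  */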
1892 
1893   if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
1894       && REG_P (SET_SRC (PATTERN (i3)))
1895       && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1896       && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1897       && GET_CODE (PATTERN (i2)) == PARALLEL
1898       && ! side_effects_p (SET_DEST (PATTERN (i3)))
1899       /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1900 	 below would need to check what is inside (and reg_overlap_mentioned_p
1901 	 doesn't support those codes anyway).  Don't allow those destinations;
1902 	 the resulting insn isn't likely to be recognized anyway.  */
1903       && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1904       && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1905       && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1906 				    SET_DEST (PATTERN (i3)))
1907       && next_real_insn (i2) == i3)
1908     {
1909       rtx p2 = PATTERN (i2);
1910 
1911       /* Make sure that the destination of I3,
1912 	 which we are going to substitute into one output of I2,
1913 	 is not used within another output of I2.  We must avoid making this:
1914 	 (parallel [(set (mem (reg 69)) ...)
1915 		    (set (reg 69) ...)])
1916 	 which is not well-defined as to order of actions.
1917 	 (Besides, reload can't handle output reloads for this.)
1918 
1919 	 The problem can also happen if the dest of I3 is a memory ref,
1920 	 if another dest in I2 is an indirect memory ref.  */
1921       for (i = 0; i < XVECLEN (p2, 0); i++)
1922 	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1923 	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1924 	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1925 					SET_DEST (XVECEXP (p2, 0, i))))
1926 	  break;
1927 
1928       if (i == XVECLEN (p2, 0))
1929 	for (i = 0; i < XVECLEN (p2, 0); i++)
1930 	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1931 	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1932 	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1933 	    {
1934 	      combine_merges++;
1935 
1936 	      subst_insn = i3;
1937 	      subst_low_cuid = INSN_CUID (i2);
1938 
1939 	      added_sets_2 = added_sets_1 = 0;
1940 	      i2dest = SET_SRC (PATTERN (i3));
1941 	      i2dest_killed = dead_or_set_p (i2, i2dest);
1942 
1943 	      /* Replace the dest in I2 with our dest and make the resulting
1944 		 insn the new pattern for I3.  Then skip to where we
1945 		 validate the pattern.  Everything was set up above.  */
1946 	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1947 		     SET_DEST (PATTERN (i3)));
1948 
1949 	      newpat = p2;
1950 	      i3_subst_into_i2 = 1;
1951 	      goto validate_replacement;
1952 	    }
1953     }
1954 
1955   /* If I2 is setting a pseudo to a constant and I3 is setting some
1956      sub-part of it to another constant, merge them by making a new
1957      constant.  */
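
  /* For example (illustrative values only), if I2 is
	 (set (reg:DI 70) (const_int 0))
     and I3 is
	 (set (subreg:SI (reg:DI 70) 0) (const_int 5))
     the two can be merged by building the combined DImode constant and
     loading it into (reg:DI 70) with a single set.  */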
1958   if (i1 == 0
1959       && (temp = single_set (i2)) != 0
1960       && (GET_CODE (SET_SRC (temp)) == CONST_INT
1961 	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1962       && GET_CODE (PATTERN (i3)) == SET
1963       && (GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT
1964 	  || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
1965       && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
1966     {
1967       rtx dest = SET_DEST (PATTERN (i3));
1968       int offset = -1;
1969       int width = 0;
1970 
1971       if (GET_CODE (dest) == ZERO_EXTRACT)
1972 	{
1973 	  if (GET_CODE (XEXP (dest, 1)) == CONST_INT
1974 	      && GET_CODE (XEXP (dest, 2)) == CONST_INT)
1975 	    {
1976 	      width = INTVAL (XEXP (dest, 1));
1977 	      offset = INTVAL (XEXP (dest, 2));
1978 	      dest = XEXP (dest, 0);
1979 	      if (BITS_BIG_ENDIAN)
1980 		offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset;
1981 	    }
1982 	}
1983       else
1984 	{
1985 	  if (GET_CODE (dest) == STRICT_LOW_PART)
1986 	    dest = XEXP (dest, 0);
1987 	  width = GET_MODE_BITSIZE (GET_MODE (dest));
1988 	  offset = 0;
1989 	}
1990 
1991       if (offset >= 0)
1992 	{
1993 	  /* If this is the low part, we're done.  */
1994 	  if (subreg_lowpart_p (dest))
1995 	    ;
1996 	  /* Handle the case where inner is twice the size of outer.  */
1997 	  else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
1998 		   == 2 * GET_MODE_BITSIZE (GET_MODE (dest)))
1999 	    offset += GET_MODE_BITSIZE (GET_MODE (dest));
2000 	  /* Otherwise give up for now.  */
2001 	  else
2002 	    offset = -1;
2003 	}
2004 
2005       if (offset >= 0)
2006 	{
2007 	  HOST_WIDE_INT mhi, ohi, ihi;
2008 	  HOST_WIDE_INT mlo, olo, ilo;
2009 	  rtx inner = SET_SRC (PATTERN (i3));
2010 	  rtx outer = SET_SRC (temp);
2011 
2012 	  if (GET_CODE (outer) == CONST_INT)
2013 	    {
2014 	      olo = INTVAL (outer);
2015 	      ohi = olo < 0 ? -1 : 0;
2016 	    }
2017 	  else
2018 	    {
2019 	      olo = CONST_DOUBLE_LOW (outer);
2020 	      ohi = CONST_DOUBLE_HIGH (outer);
2021 	    }
2022 
2023 	  if (GET_CODE (inner) == CONST_INT)
2024 	    {
2025 	      ilo = INTVAL (inner);
2026 	      ihi = ilo < 0 ? -1 : 0;
2027 	    }
2028 	  else
2029 	    {
2030 	      ilo = CONST_DOUBLE_LOW (inner);
2031 	      ihi = CONST_DOUBLE_HIGH (inner);
2032 	    }
2033 
2034 	  if (width < HOST_BITS_PER_WIDE_INT)
2035 	    {
2036 	      mlo = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
2037 	      mhi = 0;
2038 	    }
2039 	  else if (width < HOST_BITS_PER_WIDE_INT * 2)
2040 	    {
2041 	      mhi = ((unsigned HOST_WIDE_INT) 1
2042 		     << (width - HOST_BITS_PER_WIDE_INT)) - 1;
2043 	      mlo = -1;
2044 	    }
2045 	  else
2046 	    {
2047 	      mlo = -1;
2048 	      mhi = -1;
2049 	    }
2050 
2051 	  ilo &= mlo;
2052 	  ihi &= mhi;
2053 
2054 	  if (offset >= HOST_BITS_PER_WIDE_INT)
2055 	    {
2056 	      mhi = mlo << (offset - HOST_BITS_PER_WIDE_INT);
2057 	      mlo = 0;
2058 	      ihi = ilo << (offset - HOST_BITS_PER_WIDE_INT);
2059 	      ilo = 0;
2060 	    }
2061 	  else if (offset > 0)
2062 	    {
2063 	      mhi = (mhi << offset) | ((unsigned HOST_WIDE_INT) mlo
2064 		     		       >> (HOST_BITS_PER_WIDE_INT - offset));
2065 	      mlo = mlo << offset;
2066 	      ihi = (ihi << offset) | ((unsigned HOST_WIDE_INT) ilo
2067 		     		       >> (HOST_BITS_PER_WIDE_INT - offset));
2068 	      ilo = ilo << offset;
2069 	    }
2070 
2071 	  olo = (olo & ~mlo) | ilo;
2072 	  ohi = (ohi & ~mhi) | ihi;
2073 
2074 	  combine_merges++;
2075 	  subst_insn = i3;
2076 	  subst_low_cuid = INSN_CUID (i2);
2077 	  added_sets_2 = added_sets_1 = 0;
2078 	  i2dest = SET_DEST (temp);
2079 	  i2dest_killed = dead_or_set_p (i2, i2dest);
2080 
2081 	  SUBST (SET_SRC (temp),
2082 		 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp))));
2083 
2084 	  newpat = PATTERN (i2);
2085 	  goto validate_replacement;
2086 	}
2087     }
2088 
2089 #ifndef HAVE_cc0
2090   /* If we have no I1 and I2 looks like:
2091 	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2092 		   (set Y OP)])
2093      make up a dummy I1 that is
2094 	(set Y OP)
2095      and change I2 to be
2096 	(set (reg:CC X) (compare:CC Y (const_int 0)))
2097 
2098      (We can ignore any trailing CLOBBERs.)
2099 
2100      This undoes a previous combination and allows us to match a branch-and-
2101      decrement insn.  */
2102 
2103   if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
2104       && XVECLEN (PATTERN (i2), 0) >= 2
2105       && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
2106       && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
2107 	  == MODE_CC)
2108       && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
2109       && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
2110       && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
2111       && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
2112       && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
2113 		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
2114     {
2115       for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
2116 	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
2117 	  break;
2118 
2119       if (i == 1)
2120 	{
2121 	  /* We make I1 with the same INSN_UID as I2.  This gives it
2122 	     the same INSN_CUID for value tracking.  Our fake I1 will
2123 	     never appear in the insn stream so giving it the same INSN_UID
2124 	     as I2 will not cause a problem.  */
2125 
2126 	  i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
2127 			     BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
2128 			     XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
2129 			     NULL_RTX);
2130 
2131 	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
2132 	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
2133 		 SET_DEST (PATTERN (i1)));
2134 	}
2135     }
2136 #endif
2137 
2138   /* Verify that I2 and I1 are valid for combining.  */
2139   if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
2140       || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
2141     {
2142       undo_all ();
2143       return 0;
2144     }
2145 
2146   /* Record whether I2DEST is used in I2SRC and similarly for the other
2147      cases.  Knowing this will help in register status updating below.  */
2148   i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
2149   i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
2150   i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
2151   i2dest_killed = dead_or_set_p (i2, i2dest);
2152   i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
2153 
2154   /* See if I1 directly feeds into I3.  It does if I1DEST is not used
2155      in I2SRC.  */
2156   i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
2157 
2158   /* Ensure that I3's pattern can be the destination of combines.  */
2159   if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
2160 			  i1 && i2dest_in_i1src && i1_feeds_i3,
2161 			  &i3dest_killed))
2162     {
2163       undo_all ();
2164       return 0;
2165     }
2166 
2167   /* See if any of the insns is a MULT operation.  Unless one is, we will
2168      reject a combination that is, since it must be slower.  Be conservative
2169      here.  */
2170   if (GET_CODE (i2src) == MULT
2171       || (i1 != 0 && GET_CODE (i1src) == MULT)
2172       || (GET_CODE (PATTERN (i3)) == SET
2173 	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
2174     have_mult = 1;
2175 
2176   /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
2177      We used to do this EXCEPT in one case: I3 has a post-inc in an
2178      output operand.  However, that exception can give rise to insns like
2179 	mov r3,(r3)+
2180      which is a famous insn on the PDP-11 where the value of r3 used as the
2181      source was model-dependent.  Avoid this sort of thing.  */
2182 
2183 #if 0
2184   if (!(GET_CODE (PATTERN (i3)) == SET
2185 	&& REG_P (SET_SRC (PATTERN (i3)))
2186 	&& MEM_P (SET_DEST (PATTERN (i3)))
2187 	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
2188 	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
2189     /* It's not the exception.  */
2190 #endif
2191 #ifdef AUTO_INC_DEC
2192     for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
2193       if (REG_NOTE_KIND (link) == REG_INC
2194 	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
2195 	      || (i1 != 0
2196 		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
2197 	{
2198 	  undo_all ();
2199 	  return 0;
2200 	}
2201 #endif
2202 
2203   /* See if the SETs in I1 or I2 need to be kept around in the merged
2204      instruction: whenever the value set there is still needed past I3.
2205      For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
2206 
2207      For the SET in I1, we have two cases:  If I1 and I2 independently
2208      feed into I3, the set in I1 needs to be kept around if I1DEST dies
2209      or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
2210      in I1 needs to be kept around unless I1DEST dies or is set in either
2211      I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
2212      I1DEST.  If so, we know I1 feeds into I2.  */
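
  /* For instance (hypothetical), if I2 is (set (reg:SI 70) (plus:SI ...))
     and (reg:SI 70) is still used after I3, the combined pattern must become
     a PARALLEL that keeps a (set (reg:SI 70) ...) alongside the substituted
     I3; that is what ADDED_SETS_2 records.  */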
2213 
2214   added_sets_2 = ! dead_or_set_p (i3, i2dest);
2215 
2216   added_sets_1
2217     = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
2218 	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
2219 
2220   /* If the set in I2 needs to be kept around, we must make a copy of
2221      PATTERN (I2), so that when we substitute I1SRC for I1DEST in
2222      PATTERN (I2), we are only substituting for the original I1DEST, not into
2223      an already-substituted copy.  This also prevents making self-referential
2224      rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
2225      I2DEST.  */
2226 
2227   if (added_sets_2)
2228     {
2229       if (GET_CODE (PATTERN (i2)) == PARALLEL)
2230 	i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
2231       else
2232 	i2pat = copy_rtx (PATTERN (i2));
2233     }
2234 
2235   if (added_sets_1)
2236     {
2237       if (GET_CODE (PATTERN (i1)) == PARALLEL)
2238 	i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
2239       else
2240 	i1pat = copy_rtx (PATTERN (i1));
2241     }
2242 
2243   combine_merges++;
2244 
2245   /* Substitute in the latest insn for the regs set by the earlier ones.  */
2246 
2247   maxreg = max_reg_num ();
2248 
2249   subst_insn = i3;
2250 
2251 #ifndef HAVE_cc0
2252   /* Many machines that don't use CC0 have insns that can both perform an
2253      arithmetic operation and set the condition code.  These operations will
2254      be represented as a PARALLEL with the first element of the vector
2255      being a COMPARE of an arithmetic operation with the constant zero.
2256      The second element of the vector will set some pseudo to the result
2257      of the same arithmetic operation.  If we simplify the COMPARE, we won't
2258      match such a pattern and so will generate an extra insn.   Here we test
2259      for this case, where both the comparison and the operation result are
2260      needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
2261      I2SRC.  Later we will make the PARALLEL that contains I2.  */
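
  /* Illustrative shape of the pattern being formed here (hypothetical RTL,
     with hard register 17 standing for the condition code register):

	 I2:  (set (reg:SI 70) (plus:SI (reg:SI 71) (reg:SI 72)))
	 I3:  (set (reg:CC 17) (compare:CC (reg:SI 70) (const_int 0)))

     Replacing (reg:SI 70) in I3's COMPARE with I2's source gives the first
     half of a PARALLEL like

	 (parallel [(set (reg:CC 17) (compare:CC (plus:SI (reg:SI 71)
							  (reg:SI 72))
						 (const_int 0)))
		    (set (reg:SI 70) (plus:SI (reg:SI 71) (reg:SI 72)))])

     which many machines recognize as a single add-and-set-flags insn.  */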
2262 
2263   if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
2264       && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
2265       && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
2266       && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
2267     {
2268 #ifdef SELECT_CC_MODE
2269       rtx *cc_use;
2270       enum machine_mode compare_mode;
2271 #endif
2272 
2273       newpat = PATTERN (i3);
2274       SUBST (XEXP (SET_SRC (newpat), 0), i2src);
2275 
2276       i2_is_used = 1;
2277 
2278 #ifdef SELECT_CC_MODE
2279       /* See if a COMPARE with the operand we substituted in should be done
2280 	 with the mode that is currently being used.  If not, do the same
2281 	 processing we do in `subst' for a SET; namely, if the destination
2282 	 is used only once, try to replace it with a register of the proper
2283 	 mode and also replace the COMPARE.  */
2284       if (undobuf.other_insn == 0
2285 	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
2286 					&undobuf.other_insn))
2287 	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
2288 					      i2src, const0_rtx))
2289 	      != GET_MODE (SET_DEST (newpat))))
2290 	{
2291 	  if (can_change_dest_mode(SET_DEST (newpat), added_sets_2,
2292 				   compare_mode))
2293 	    {
2294 	      unsigned int regno = REGNO (SET_DEST (newpat));
2295 	      rtx new_dest;
2296 
2297 	      if (regno < FIRST_PSEUDO_REGISTER)
2298 		new_dest = gen_rtx_REG (compare_mode, regno);
2299 	      else
2300 		{
2301 		  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
2302 		  new_dest = regno_reg_rtx[regno];
2303 		}
2304 
2305 	      SUBST (SET_DEST (newpat), new_dest);
2306 	      SUBST (XEXP (*cc_use, 0), new_dest);
2307 	      SUBST (SET_SRC (newpat),
2308 		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
2309 	    }
2310 	  else
2311 	    undobuf.other_insn = 0;
2312 	}
2313 #endif
2314     }
2315   else
2316 #endif
2317     {
2318       /* It is possible that the source of I2 or I1 may be performing
2319 	 an unneeded operation, such as a ZERO_EXTEND of something
2320 	 that is known to have the high part zero.  Handle that case
2321 	 by letting subst look at the innermost one of them.
2322 
2323 	 Another way to do this would be to have a function that tries
2324 	 to simplify a single insn instead of merging two or more
2325 	 insns.  We don't do this because of the potential of infinite
2326 	 loops and because of the potential extra memory required.
2327 	 However, doing it the way we are is a bit of a kludge and
2328 	 doesn't catch all cases.
2329 
2330 	 But only do this if -fexpensive-optimizations since it slows
2331 	 things down and doesn't usually win.
2332 
2333 	 This is not done in the COMPARE case above because the
2334 	 unmodified I2PAT is used in the PARALLEL and so a pattern
2335 	 with a modified I2SRC would not match.  */
2336 
2337       if (flag_expensive_optimizations)
2338 	{
2339 	  /* Pass pc_rtx so no substitutions are done, just
2340 	     simplifications.  */
2341 	  if (i1)
2342 	    {
2343 	      subst_low_cuid = INSN_CUID (i1);
2344 	      i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
2345 	    }
2346 	  else
2347 	    {
2348 	      subst_low_cuid = INSN_CUID (i2);
2349 	      i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
2350 	    }
2351 	}
2352 
2353       n_occurrences = 0;		/* `subst' counts here */
2354 
2355       /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
2356 	 need to make a unique copy of I2SRC each time we substitute it
2357 	 to avoid self-referential rtl.  */
2358 
2359       subst_low_cuid = INSN_CUID (i2);
2360       newpat = subst (PATTERN (i3), i2dest, i2src, 0,
2361 		      ! i1_feeds_i3 && i1dest_in_i1src);
2362       substed_i2 = 1;
2363 
2364       /* Record whether i2's body now appears within i3's body.  */
2365       i2_is_used = n_occurrences;
2366     }
2367 
2368   /* If we already got a failure, don't try to do more.  Otherwise,
2369      try to substitute in I1 if we have it.  */
2370 
2371   if (i1 && GET_CODE (newpat) != CLOBBER)
2372     {
2373       /* Before we can do this substitution, we must redo the test done
2374 	 above (see detailed comments there) that ensures  that I1DEST
2375 	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
2376 
2377 #if !defined(OPENBSD_NATIVE) && !defined(OPENBSD_CROSS) /* GCC PR #34628 */
2378       if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
2379 			      0, (rtx*) 0))
2380 #endif
2381 	{
2382 	  undo_all ();
2383 	  return 0;
2384 	}
2385 
2386       n_occurrences = 0;
2387       subst_low_cuid = INSN_CUID (i1);
2388       newpat = subst (newpat, i1dest, i1src, 0, 0);
2389       substed_i1 = 1;
2390     }
2391 
2392   /* Fail if an autoincrement side-effect has been duplicated.  Be careful
2393      to count all the ways that I2SRC and I1SRC can be used.  */
2394   if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
2395        && i2_is_used + added_sets_2 > 1)
2396       || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2397 	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2398 	      > 1))
2399       /* Fail if we tried to make a new register.  */
2400       || max_reg_num () != maxreg
2401       /* Fail if we couldn't do something and have a CLOBBER.  */
2402       || GET_CODE (newpat) == CLOBBER
2403       /* Fail if this new pattern is a MULT and we didn't have one before
2404 	 at the outer level.  */
2405       || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2406 	  && ! have_mult))
2407     {
2408       undo_all ();
2409       return 0;
2410     }
2411 
2412   /* If the actions of the earlier insns must be kept
2413      in addition to substituting them into the latest one,
2414      we must make a new PARALLEL for the latest insn
2415      to hold the additional SETs.  */
2416 
2417   if (added_sets_1 || added_sets_2)
2418     {
2419       combine_extras++;
2420 
2421       if (GET_CODE (newpat) == PARALLEL)
2422 	{
2423 	  rtvec old = XVEC (newpat, 0);
2424 	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2425 	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2426 	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2427 		  sizeof (old->elem[0]) * old->num_elem);
2428 	}
2429       else
2430 	{
2431 	  rtx old = newpat;
2432 	  total_sets = 1 + added_sets_1 + added_sets_2;
2433 	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2434 	  XVECEXP (newpat, 0, 0) = old;
2435 	}
2436 
2437       if (added_sets_1)
2438 	XVECEXP (newpat, 0, --total_sets) = i1pat;
2439 
2440       if (added_sets_2)
2441 	{
2442 	  /* If there is no I1, use I2's body as is.  We used to also not do
2443 	     the subst call below if I2 was substituted into I3,
2444 	     but that could lose a simplification.  */
2445 	  if (i1 == 0)
2446 	    XVECEXP (newpat, 0, --total_sets) = i2pat;
2447 	  else
2448 	    /* See comment where i2pat is assigned.  */
2449 	    XVECEXP (newpat, 0, --total_sets)
2450 	      = subst (i2pat, i1dest, i1src, 0, 0);
2451 	}
2452     }
2453 
2454   /* We come here when we are replacing a destination in I2 with the
2455      destination of I3.  */
2456  validate_replacement:
2457 
2458   /* Note which hard regs this insn has as inputs.  */
2459   mark_used_regs_combine (newpat);
2460 
2461   /* If recog_for_combine fails, it strips existing clobbers.  If we'll
2462      consider splitting this pattern, we might need these clobbers.  */
2463   if (i1 && GET_CODE (newpat) == PARALLEL
2464       && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
2465     {
2466       int len = XVECLEN (newpat, 0);
2467 
2468       newpat_vec_with_clobbers = rtvec_alloc (len);
2469       for (i = 0; i < len; i++)
2470 	RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
2471     }
2472 
2473   /* Is the result of combination a valid instruction?  */
2474   insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2475 
2476   /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2477      the second SET's destination is a register that is unused and isn't
2478      marked as an instruction that might trap in an EH region.  In that case,
2479      we just need the first SET.   This can occur when simplifying a divmod
2480      insn.  We *must* test for this case here because the code below that
2481      splits two independent SETs doesn't handle this case correctly when it
2482      updates the register status.
2483 
2484      It's pointless doing this if we originally had two sets, one from
2485      i3, and one from i2.  Combining then splitting the parallel results
2486      in the original i2 again plus an invalid insn (which we delete).
2487      The net effect is only to move instructions around, which makes
2488      debug info less accurate.
2489 
2490      Also check the case where the first SET's destination is unused.
2491      That would not cause incorrect code, but does cause an unneeded
2492      insn to remain.  */
2493 
2494   if (insn_code_number < 0
2495       && !(added_sets_2 && i1 == 0)
2496       && GET_CODE (newpat) == PARALLEL
2497       && XVECLEN (newpat, 0) == 2
2498       && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2499       && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2500       && asm_noperands (newpat) < 0)
2501     {
2502       rtx set0 = XVECEXP (newpat, 0, 0);
2503       rtx set1 = XVECEXP (newpat, 0, 1);
2504       rtx note;
2505 
2506       if (((REG_P (SET_DEST (set1))
2507 	    && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
2508 	   || (GET_CODE (SET_DEST (set1)) == SUBREG
2509 	       && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
2510 	  && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2511 	      || INTVAL (XEXP (note, 0)) <= 0)
2512 	  && ! side_effects_p (SET_SRC (set1)))
2513 	{
2514 	  newpat = set0;
2515 	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2516 	}
2517 
2518       else if (((REG_P (SET_DEST (set0))
2519 		 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
2520 		|| (GET_CODE (SET_DEST (set0)) == SUBREG
2521 		    && find_reg_note (i3, REG_UNUSED,
2522 				      SUBREG_REG (SET_DEST (set0)))))
2523 	       && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2524 		   || INTVAL (XEXP (note, 0)) <= 0)
2525 	       && ! side_effects_p (SET_SRC (set0)))
2526 	{
2527 	  newpat = set1;
2528 	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2529 
2530 	  if (insn_code_number >= 0)
2531 	    {
2532 	      /* If we will be able to accept this, we have made a
2533 		 change to the destination of I3.  This requires us to
2534 		 do a few adjustments.  */
2535 
2536 	      PATTERN (i3) = newpat;
2537 	      adjust_for_new_dest (i3);
2538 	    }
2539 	}
2540     }
2541 
2542   /* If we were combining three insns and the result is a simple SET
2543      with no ASM_OPERANDS that wasn't recognized, try to split it into two
2544      insns.  There are two ways to do this.  It can be split using a
2545      machine-specific method (like when you have an addition of a large
2546      constant) or by combine in the function find_split_point.  */
2547 
2548   if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2549       && asm_noperands (newpat) < 0)
2550     {
2551       rtx m_split, *split;
2552 
2553       /* See if the MD file can split NEWPAT.  If it can't, see if letting it
2554 	 use I2DEST as a scratch register will help.  In the latter case,
2555 	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
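
      /* In other words (illustrative shape only), the fallback attempt hands
	 the machine description

	     (parallel [NEWPAT
			(clobber (reg:SI 70))])

	 where (reg:SI 70) stands for I2DEST, so a define_split that needs a
	 scratch register can match.  */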
2556 
2557       m_split = split_insns (newpat, i3);
2558 
2559       /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2560 	 inputs of NEWPAT.  */
2561 
2562       /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2563 	 possible to try that as a scratch reg.  This would require adding
2564 	 more code to make it work though.  */
2565 
2566       if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
2567 	{
2568 	  enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
2569 
2570 	  /* First try to split using the original register as a
2571 	     scratch register.  */
2572 	  m_split = split_insns (gen_rtx_PARALLEL
2573 				 (VOIDmode,
2574 				  gen_rtvec (2, newpat,
2575 					     gen_rtx_CLOBBER (VOIDmode,
2576 							      i2dest))),
2577 				 i3);
2578 
2579 	  /* If that didn't work, try changing the mode of I2DEST if
2580 	     we can.  */
2581 	  if (m_split == 0
2582 	      && new_mode != GET_MODE (i2dest)
2583 	      && new_mode != VOIDmode
2584 	      && can_change_dest_mode (i2dest, added_sets_2, new_mode))
2585 	    {
2586 	      enum machine_mode old_mode = GET_MODE (i2dest);
2587 	      rtx ni2dest;
2588 
2589 	      if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
2590 		ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
2591 	      else
2592 		{
2593 		  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
2594 		  ni2dest = regno_reg_rtx[REGNO (i2dest)];
2595 		}
2596 
2597 	      m_split = split_insns (gen_rtx_PARALLEL
2598 				     (VOIDmode,
2599 				      gen_rtvec (2, newpat,
2600 						 gen_rtx_CLOBBER (VOIDmode,
2601 								  ni2dest))),
2602 				     i3);
2603 
2604 	      if (m_split == 0
2605 		  && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2606 		{
2607 		  struct undo *buf;
2608 
2609 		  PUT_MODE (regno_reg_rtx[REGNO (i2dest)], old_mode);
2610 		  buf = undobuf.undos;
2611 		  undobuf.undos = buf->next;
2612 		  buf->next = undobuf.frees;
2613 		  undobuf.frees = buf;
2614 		}
2615 	    }
2616 	}
2617 
2618       /* If recog_for_combine has discarded clobbers, try to use them
2619 	 again for the split.  */
2620       if (m_split == 0 && newpat_vec_with_clobbers)
2621 	m_split
2622 	  = split_insns (gen_rtx_PARALLEL (VOIDmode,
2623 					   newpat_vec_with_clobbers), i3);
2624 
2625       if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2626 	{
2627 	  m_split = PATTERN (m_split);
2628 	  insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2629 	  if (insn_code_number >= 0)
2630 	    newpat = m_split;
2631 	}
2632       else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2633 	       && (next_real_insn (i2) == i3
2634 		   || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2635 	{
2636 	  rtx i2set, i3set;
2637 	  rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2638 	  newi2pat = PATTERN (m_split);
2639 
2640 	  i3set = single_set (NEXT_INSN (m_split));
2641 	  i2set = single_set (m_split);
2642 
2643 	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2644 
2645 	  /* If I2 or I3 has multiple SETs, we won't know how to track
2646 	     register status, so don't use these insns.  If I2's destination
2647 	     is used between I2 and I3, we also can't use these insns.  */
2648 
2649 	  if (i2_code_number >= 0 && i2set && i3set
2650 	      && (next_real_insn (i2) == i3
2651 		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2652 	    insn_code_number = recog_for_combine (&newi3pat, i3,
2653 						  &new_i3_notes);
2654 	  if (insn_code_number >= 0)
2655 	    newpat = newi3pat;
2656 
2657 	  /* It is possible that both insns now set the destination of I3.
2658 	     If so, we must show an extra use of it.  */
2659 
2660 	  if (insn_code_number >= 0)
2661 	    {
2662 	      rtx new_i3_dest = SET_DEST (i3set);
2663 	      rtx new_i2_dest = SET_DEST (i2set);
2664 
2665 	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2666 		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2667 		     || GET_CODE (new_i3_dest) == SUBREG)
2668 		new_i3_dest = XEXP (new_i3_dest, 0);
2669 
2670 	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2671 		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2672 		     || GET_CODE (new_i2_dest) == SUBREG)
2673 		new_i2_dest = XEXP (new_i2_dest, 0);
2674 
2675 	      if (REG_P (new_i3_dest)
2676 		  && REG_P (new_i2_dest)
2677 		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2678 		REG_N_SETS (REGNO (new_i2_dest))++;
2679 	    }
2680 	}
2681 
2682       /* If we can split it and use I2DEST, go ahead and see if that
2683 	 helps things be recognized.  Verify that none of the registers
2684 	 are set between I2 and I3.  */
2685       if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2686 #ifdef HAVE_cc0
2687 	  && REG_P (i2dest)
2688 #endif
2689 	  /* We need I2DEST in the proper mode.  If it is a hard register
2690 	     or the only use of a pseudo, we can change its mode.
2691 	     Make sure we don't change a hard register to have a mode that
2692 	     isn't valid for it, or change the number of registers.  */
2693 	  && (GET_MODE (*split) == GET_MODE (i2dest)
2694 	      || GET_MODE (*split) == VOIDmode
2695 	      || can_change_dest_mode (i2dest, added_sets_2,
2696 				       GET_MODE (*split)))
2697 	  && (next_real_insn (i2) == i3
2698 	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2699 	  /* We can't overwrite I2DEST if its value is still used by
2700 	     NEWPAT.  */
2701 	  && ! reg_referenced_p (i2dest, newpat))
2702 	{
2703 	  rtx newdest = i2dest;
2704 	  enum rtx_code split_code = GET_CODE (*split);
2705 	  enum machine_mode split_mode = GET_MODE (*split);
2706 	  bool subst_done = false;
2707 	  newi2pat = NULL_RTX;
2708 
2709 	  /* Get NEWDEST as a register in the proper mode.  We have already
2710 	     validated that we can do this.  */
2711 	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2712 	    {
2713 	      if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
2714 		newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2715 	      else
2716 		{
2717 		  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
2718 		  newdest = regno_reg_rtx[REGNO (i2dest)];
2719 		}
2720 	    }
2721 
2722 	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2723 	     an ASHIFT.  This can occur if it was inside a PLUS and hence
2724 	     appeared to be a memory address.  This is a kludge.  */
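
	  /* E.g. (illustrative only), (mult:SI (reg:SI 70) (const_int 8))
	     becomes (ashift:SI (reg:SI 70) (const_int 3)), since
	     exact_log2 (8) == 3.  */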
2725 	  if (split_code == MULT
2726 	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
2727 	      && INTVAL (XEXP (*split, 1)) > 0
2728 	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2729 	    {
2730 	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
2731 					     XEXP (*split, 0), GEN_INT (i)));
2732 	      /* Update split_code because we may not have a multiply
2733 		 anymore.  */
2734 	      split_code = GET_CODE (*split);
2735 	    }
2736 
2737 #ifdef INSN_SCHEDULING
2738 	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2739 	     be written as a ZERO_EXTEND.  */
2740 	  if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
2741 	    {
2742 #ifdef LOAD_EXTEND_OP
2743 	      /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2744 		 what it really is.  */
2745 	      if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2746 		  == SIGN_EXTEND)
2747 		SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2748 						    SUBREG_REG (*split)));
2749 	      else
2750 #endif
2751 		SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2752 						    SUBREG_REG (*split)));
2753 	    }
2754 #endif
2755 
2756 	  /* Attempt to split binary operators using arithmetic identities.  */
2757 	  if (BINARY_P (SET_SRC (newpat))
2758 	      && split_mode == GET_MODE (SET_SRC (newpat))
2759 	      && ! side_effects_p (SET_SRC (newpat)))
2760 	    {
2761 	      rtx setsrc = SET_SRC (newpat);
2762 	      enum machine_mode mode = GET_MODE (setsrc);
2763 	      enum rtx_code code = GET_CODE (setsrc);
2764 	      rtx src_op0 = XEXP (setsrc, 0);
2765 	      rtx src_op1 = XEXP (setsrc, 1);
2766 
2767 	      /* Split "X = Y op Y" as "Z = Y; X = Z op Z".  */
2768 	      if (rtx_equal_p (src_op0, src_op1))
2769 		{
2770 		  newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
2771 		  SUBST (XEXP (setsrc, 0), newdest);
2772 		  SUBST (XEXP (setsrc, 1), newdest);
2773 		  subst_done = true;
2774 		}
2775 	      /* Split "((P op Q) op R) op S" where op is PLUS or MULT.  */
2776 	      else if ((code == PLUS || code == MULT)
2777 		       && GET_CODE (src_op0) == code
2778 		       && GET_CODE (XEXP (src_op0, 0)) == code
2779 		       && (INTEGRAL_MODE_P (mode)
2780 			   || (FLOAT_MODE_P (mode)
2781 			       && flag_unsafe_math_optimizations)))
2782 		{
2783 		  rtx p = XEXP (XEXP (src_op0, 0), 0);
2784 		  rtx q = XEXP (XEXP (src_op0, 0), 1);
2785 		  rtx r = XEXP (src_op0, 1);
2786 		  rtx s = src_op1;
2787 
2788 		  /* Split both "((X op Y) op X) op Y" and
2789 		     "((X op Y) op Y) op X" as "T op T" where T is
2790 		     "X op Y".  */
2791 		  if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
2792 		       || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
2793 		    {
2794 		      newi2pat = gen_rtx_SET (VOIDmode, newdest,
2795 					      XEXP (src_op0, 0));
2796 		      SUBST (XEXP (setsrc, 0), newdest);
2797 		      SUBST (XEXP (setsrc, 1), newdest);
2798 		      subst_done = true;
2799 		    }
2800 		  /* Split "((X op X) op Y) op Y" as "T op T" where
2801 		     T is "X op Y".  */
2802 		  else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
2803 		    {
2804 		      rtx tmp = simplify_gen_binary (code, mode, p, r);
2805 		      newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
2806 		      SUBST (XEXP (setsrc, 0), newdest);
2807 		      SUBST (XEXP (setsrc, 1), newdest);
2808 		      subst_done = true;
2809 		    }
2810 		}
2811 	    }
2812 
2813 	  if (!subst_done)
2814 	    {
2815 	      newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2816 	      SUBST (*split, newdest);
2817 	    }
2818 
2819 	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2820 
2821 	  /* recog_for_combine might have added CLOBBERs to newi2pat.
2822 	     Make sure NEWPAT does not depend on the clobbered regs.  */
2823 	  if (GET_CODE (newi2pat) == PARALLEL)
2824 	    for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
2825 	      if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
2826 		{
2827 		  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
2828 		  if (reg_overlap_mentioned_p (reg, newpat))
2829 		    {
2830 		      undo_all ();
2831 		      return 0;
2832 		    }
2833 		}
2834 
2835 	  /* If the split point was a MULT and we didn't have one before,
2836 	     don't use one now.  */
2837 	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2838 	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2839 	}
2840     }
2841 
2842   /* Check for a case where we loaded from memory in a narrow mode and
2843      then sign extended it, but we need both registers.  In that case,
2844      we have a PARALLEL with both loads from the same memory location.
2845      We can split this into a load from memory followed by a register-register
2846      copy.  This saves at least one insn, more if register allocation can
2847      eliminate the copy.
2848 
2849      We cannot do this if the destination of the first assignment is a
2850      condition code register or cc0.  We eliminate this case by making sure
2851      the SET_DEST and SET_SRC have the same mode.
2852 
2853      We cannot do this if the destination of the second assignment is
2854      a register that we have already assumed is zero-extended.  Similarly
2855      for a SUBREG of such a register.  */
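
  /* Sketch of the case handled below (hypothetical RTL):

	 (parallel [(set (reg:SI 70) (sign_extend:SI (mem:HI (reg:SI 80))))
		    (set (reg:HI 71) (mem:HI (reg:SI 80)))])

     becomes the extending load alone as the new I2, followed by

	 (set (reg:HI 71) (subreg:HI (reg:SI 70) 0))

     as the new I3 (low-part offset shown for a little-endian target), i.e.
     one memory access plus a register-register copy.  */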
2856 
2857   else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2858 	   && GET_CODE (newpat) == PARALLEL
2859 	   && XVECLEN (newpat, 0) == 2
2860 	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2861 	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2862 	   && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2863 	       == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2864 	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2865 	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2866 			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2867 	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2868 				   INSN_CUID (i2))
2869 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2870 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2871 	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2872 		 (REG_P (temp)
2873 		  && reg_stat[REGNO (temp)].nonzero_bits != 0
2874 		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2875 		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2876 		  && (reg_stat[REGNO (temp)].nonzero_bits
2877 		      != GET_MODE_MASK (word_mode))))
2878 	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2879 		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2880 		     (REG_P (temp)
2881 		      && reg_stat[REGNO (temp)].nonzero_bits != 0
2882 		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2883 		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2884 		      && (reg_stat[REGNO (temp)].nonzero_bits
2885 			  != GET_MODE_MASK (word_mode)))))
2886 	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2887 					 SET_SRC (XVECEXP (newpat, 0, 1)))
2888 	   && ! find_reg_note (i3, REG_UNUSED,
2889 			       SET_DEST (XVECEXP (newpat, 0, 0))))
2890     {
2891       rtx ni2dest;
2892 
2893       newi2pat = XVECEXP (newpat, 0, 0);
2894       ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2895       newpat = XVECEXP (newpat, 0, 1);
2896       SUBST (SET_SRC (newpat),
2897 	     gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
2898       i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2899 
2900       if (i2_code_number >= 0)
2901 	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2902 
2903       if (insn_code_number >= 0)
2904 	swap_i2i3 = 1;
2905     }
2906 
2907   /* Similarly, check for a case where we have a PARALLEL of two independent
2908      SETs but we started with three insns.  In this case, we can do the sets
2909      as two separate insns.  This case occurs when some SET allows two
2910      other insns to combine, but the destination of that SET is still live.  */
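  /* Sketch (register names arbitrary): combining can leave

	(parallel [(set (reg A) (expr1))
		   (set (reg B) (expr2))])

     where neither SET uses the other's destination; one SET is then
     re-emitted as the new I2 and the other replaces I3.  */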
2911 
2912   else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2913 	   && GET_CODE (newpat) == PARALLEL
2914 	   && XVECLEN (newpat, 0) == 2
2915 	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2916 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2917 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2918 	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2919 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2920 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2921 	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2922 				   INSN_CUID (i2))
2923 	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2924 				  XVECEXP (newpat, 0, 0))
2925 	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2926 				  XVECEXP (newpat, 0, 1))
2927 	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2928 		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))
2929 #ifdef HAVE_cc0
2930 	   /* We cannot split the parallel into two sets if both sets
2931 	      reference cc0.  */
2932 	   && ! (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
2933 		 && reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1)))
2934 #endif
2935 	   )
2936     {
2937       /* Normally, it doesn't matter which of the two is done first,
2938 	 but it does if one references cc0.  In that case, it has to
2939 	 be first.  */
2940 #ifdef HAVE_cc0
2941       if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2942 	{
2943 	  newi2pat = XVECEXP (newpat, 0, 0);
2944 	  newpat = XVECEXP (newpat, 0, 1);
2945 	}
2946       else
2947 #endif
2948 	{
2949 	  newi2pat = XVECEXP (newpat, 0, 1);
2950 	  newpat = XVECEXP (newpat, 0, 0);
2951 	}
2952 
2953       i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2954 
2955       if (i2_code_number >= 0)
2956 	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2957     }
2958 
2959   /* If it still isn't recognized, fail and change things back the way they
2960      were.  */
2961   if ((insn_code_number < 0
2962        /* Is the result a reasonable ASM_OPERANDS?  */
2963        && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2964     {
2965       undo_all ();
2966       return 0;
2967     }
2968 
2969   /* If we had to change another insn, make sure it is valid also.  */
2970   if (undobuf.other_insn)
2971     {
2972       rtx other_pat = PATTERN (undobuf.other_insn);
2973       rtx new_other_notes;
2974       rtx note, next;
2975 
2976       CLEAR_HARD_REG_SET (newpat_used_regs);
2977 
2978       other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2979 					     &new_other_notes);
2980 
2981       if (other_code_number < 0 && ! check_asm_operands (other_pat))
2982 	{
2983 	  undo_all ();
2984 	  return 0;
2985 	}
2986 
2987       PATTERN (undobuf.other_insn) = other_pat;
2988 
2989       /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2990 	 are still valid.  Then add any non-duplicate notes added by
2991 	 recog_for_combine.  */
2992       for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2993 	{
2994 	  next = XEXP (note, 1);
2995 
2996 	  if (REG_NOTE_KIND (note) == REG_UNUSED
2997 	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2998 	    {
2999 	      if (REG_P (XEXP (note, 0)))
3000 		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
3001 
3002 	      remove_note (undobuf.other_insn, note);
3003 	    }
3004 	}
3005 
3006       for (note = new_other_notes; note; note = XEXP (note, 1))
3007 	if (REG_P (XEXP (note, 0)))
3008 	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
3009 
3010       distribute_notes (new_other_notes, undobuf.other_insn,
3011 			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
3012     }
3013 #ifdef HAVE_cc0
3014   /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
3015      they are adjacent to each other or not.  */
3016   {
3017     rtx p = prev_nonnote_insn (i3);
3018     if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
3019 	&& sets_cc0_p (newi2pat))
3020       {
3021 	undo_all ();
3022 	return 0;
3023       }
3024   }
3025 #endif
3026 
3027   /* Only allow this combination if insn_rtx_costs reports that the
3028      replacement instructions are cheaper than the originals.  */
3029   if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat))
3030     {
3031       undo_all ();
3032       return 0;
3033     }
3034 
3035   /* We now know that we can do this combination.  Merge the insns and
3036      update the status of registers and LOG_LINKS.  */
3037 
3038   if (swap_i2i3)
3039     {
3040       rtx insn;
3041       rtx link;
3042       rtx ni2dest;
3043 
3044       /* I3 now uses what used to be its destination and which is now
3045 	 I2's destination.  This requires us to do a few adjustments.  */
3046       PATTERN (i3) = newpat;
3047       adjust_for_new_dest (i3);
3048 
3049       /* We need a LOG_LINK from I3 to I2.  But we used to have one,
3050 	 so we still will.
3051 
3052 	 However, some later insn might be using I2's dest and have
3053 	 a LOG_LINK pointing at I3.  We must remove this link.
3054 	 The simplest way to remove the link is to point it at I1,
3055 	 which we know will be a NOTE.  */
3056 
3057       /* newi2pat is usually a SET here; however, recog_for_combine might
3058 	 have added some clobbers.  */
3059       if (GET_CODE (newi2pat) == PARALLEL)
3060 	ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
3061       else
3062 	ni2dest = SET_DEST (newi2pat);
3063 
3064       for (insn = NEXT_INSN (i3);
3065 	   insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
3066 		    || insn != BB_HEAD (this_basic_block->next_bb));
3067 	   insn = NEXT_INSN (insn))
3068 	{
3069 	  if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
3070 	    {
3071 	      for (link = LOG_LINKS (insn); link;
3072 		   link = XEXP (link, 1))
3073 		if (XEXP (link, 0) == i3)
3074 		  XEXP (link, 0) = i1;
3075 
3076 	      break;
3077 	    }
3078 	}
3079     }
3080 
3081   {
3082     rtx i3notes, i2notes, i1notes = 0;
3083     rtx i3links, i2links, i1links = 0;
3084     rtx midnotes = 0;
3085     unsigned int regno;
3086     /* Compute which registers we expect to eliminate.  newi2pat may be setting
3087        either i3dest or i2dest, so we must check it.  Also, i1dest may be the
3088        same as i3dest, in which case newi2pat may be setting i1dest.  */
3089     rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
3090 		   || i2dest_in_i2src || i2dest_in_i1src
3091 		   || !i2dest_killed
3092 		   ? 0 : i2dest);
3093     rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
3094 		   || (newi2pat && reg_set_p (i1dest, newi2pat))
3095 		   || !i1dest_killed
3096 		   ? 0 : i1dest);
3097 
3098     /* Get the old REG_NOTES and LOG_LINKS from all our insns and
3099        clear them.  */
3100     i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
3101     i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
3102     if (i1)
3103       i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
3104 
3105     /* Ensure that we do not have something that should not be shared but
3106        occurs multiple times in the new insns.  Check this by first
3107        resetting all the `used' flags and then copying anything that is shared.  */
3108 
3109     reset_used_flags (i3notes);
3110     reset_used_flags (i2notes);
3111     reset_used_flags (i1notes);
3112     reset_used_flags (newpat);
3113     reset_used_flags (newi2pat);
3114     if (undobuf.other_insn)
3115       reset_used_flags (PATTERN (undobuf.other_insn));
3116 
3117     i3notes = copy_rtx_if_shared (i3notes);
3118     i2notes = copy_rtx_if_shared (i2notes);
3119     i1notes = copy_rtx_if_shared (i1notes);
3120     newpat = copy_rtx_if_shared (newpat);
3121     newi2pat = copy_rtx_if_shared (newi2pat);
3122     if (undobuf.other_insn)
3123       reset_used_flags (PATTERN (undobuf.other_insn));
3124 
3125     INSN_CODE (i3) = insn_code_number;
3126     PATTERN (i3) = newpat;
3127 
3128     if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
3129       {
3130 	rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
3131 
3132 	reset_used_flags (call_usage);
3133 	call_usage = copy_rtx (call_usage);
3134 
3135 	if (substed_i2)
3136 	  replace_rtx (call_usage, i2dest, i2src);
3137 
3138 	if (substed_i1)
3139 	  replace_rtx (call_usage, i1dest, i1src);
3140 
3141 	CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
3142       }
3143 
3144     if (undobuf.other_insn)
3145       INSN_CODE (undobuf.other_insn) = other_code_number;
3146 
3147     /* We had one special case above where I2 had more than one set and
3148        we replaced a destination of one of those sets with the destination
3149        of I3.  In that case, we have to update LOG_LINKS of insns later
3150        in this basic block.  Note that this (expensive) case is rare.
3151 
3152        Also, in this case, we must pretend that all REG_NOTEs for I2
3153        actually came from I3, so that REG_UNUSED notes from I2 will be
3154        properly handled.  */
3155 
3156     if (i3_subst_into_i2)
3157       {
3158 	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
3159 	  if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
3160 	       || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
3161 	      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
3162 	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
3163 	      && ! find_reg_note (i2, REG_UNUSED,
3164 				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
3165 	    for (temp = NEXT_INSN (i2);
3166 		 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
3167 			  || BB_HEAD (this_basic_block) != temp);
3168 		 temp = NEXT_INSN (temp))
3169 	      if (temp != i3 && INSN_P (temp))
3170 		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
3171 		  if (XEXP (link, 0) == i2)
3172 		    XEXP (link, 0) = i3;
3173 
3174 	if (i3notes)
3175 	  {
3176 	    rtx link = i3notes;
3177 	    while (XEXP (link, 1))
3178 	      link = XEXP (link, 1);
3179 	    XEXP (link, 1) = i2notes;
3180 	  }
3181 	else
3182 	  i3notes = i2notes;
3183 	i2notes = 0;
3184       }
3185 
3186     LOG_LINKS (i3) = 0;
3187     REG_NOTES (i3) = 0;
3188     LOG_LINKS (i2) = 0;
3189     REG_NOTES (i2) = 0;
3190 
3191     if (newi2pat)
3192       {
3193 	INSN_CODE (i2) = i2_code_number;
3194 	PATTERN (i2) = newi2pat;
3195       }
3196     else
3197       SET_INSN_DELETED (i2);
3198 
3199     if (i1)
3200       {
3201 	LOG_LINKS (i1) = 0;
3202 	REG_NOTES (i1) = 0;
3203 	SET_INSN_DELETED (i1);
3204       }
3205 
3206     /* Get death notes for everything that is now used in either I3 or
3207        I2 and used to die in a previous insn.  If we built two new
3208        patterns, move from I1 to I2 then I2 to I3 so that we get the
3209        proper movement on registers that I2 modifies.  */
3210 
3211     if (newi2pat)
3212       {
3213 	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
3214 	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
3215       }
3216     else
3217       move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
3218 		   i3, &midnotes);
3219 
3220     /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
3221     if (i3notes)
3222       distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
3223 			elim_i2, elim_i1);
3224     if (i2notes)
3225       distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
3226 			elim_i2, elim_i1);
3227     if (i1notes)
3228       distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
3229 			elim_i2, elim_i1);
3230     if (midnotes)
3231       distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3232 			elim_i2, elim_i1);
3233 
3234     /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
3235        know these are REG_UNUSED and want them to go to the desired insn,
3236        so we always pass it as i3.  We have not counted the notes in
3237        reg_n_deaths yet, so we need to do so now.  */
3238 
3239     if (newi2pat && new_i2_notes)
3240       {
3241 	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
3242 	  if (REG_P (XEXP (temp, 0)))
3243 	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
3244 
3245 	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3246       }
3247 
3248     if (new_i3_notes)
3249       {
3250 	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
3251 	  if (REG_P (XEXP (temp, 0)))
3252 	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
3253 
3254 	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
3255       }
3256 
3257     /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
3258        put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
3259        I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
3260        in that case, it might delete I2.  Similarly for I2 and I1.
3261        Show an additional death due to the REG_DEAD note we make here.  If
3262        we discard it in distribute_notes, we will decrement it again.  */
3263 
3264     if (i3dest_killed)
3265       {
3266 	if (REG_P (i3dest_killed))
3267 	  REG_N_DEATHS (REGNO (i3dest_killed))++;
3268 
3269 	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
3270 	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
3271 					       NULL_RTX),
3272 			    NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
3273 	else
3274 	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
3275 					       NULL_RTX),
3276 			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3277 			    elim_i2, elim_i1);
3278       }
3279 
3280     if (i2dest_in_i2src)
3281       {
3282 	if (REG_P (i2dest))
3283 	  REG_N_DEATHS (REGNO (i2dest))++;
3284 
3285 	if (newi2pat && reg_set_p (i2dest, newi2pat))
3286 	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
3287 			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3288 	else
3289 	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
3290 			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3291 			    NULL_RTX, NULL_RTX);
3292       }
3293 
3294     if (i1dest_in_i1src)
3295       {
3296 	if (REG_P (i1dest))
3297 	  REG_N_DEATHS (REGNO (i1dest))++;
3298 
3299 	if (newi2pat && reg_set_p (i1dest, newi2pat))
3300 	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
3301 			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
3302 	else
3303 	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
3304 			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3305 			    NULL_RTX, NULL_RTX);
3306       }
3307 
3308     distribute_links (i3links);
3309     distribute_links (i2links);
3310     distribute_links (i1links);
3311 
3312     if (REG_P (i2dest))
3313       {
3314 	rtx link;
3315 	rtx i2_insn = 0, i2_val = 0, set;
3316 
3317 	/* The insn that used to set this register doesn't exist, and
3318 	   this life of the register may not exist either.  See if one of
3319 	   I3's links points to an insn that sets I2DEST.  If it does,
3320 	   that is now the last known value for I2DEST. If we don't update
3321 	   this and I2 set the register to a value that depended on its old
3322 	   contents, we will get confused.  If this insn is used, things
3323 	   will be set correctly in combine_instructions.  */
3324 
3325 	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3326 	  if ((set = single_set (XEXP (link, 0))) != 0
3327 	      && rtx_equal_p (i2dest, SET_DEST (set)))
3328 	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
3329 
3330 	record_value_for_reg (i2dest, i2_insn, i2_val);
3331 
3332 	/* If the reg formerly set in I2 died only once and that was in I3,
3333 	   zero its use count so it won't make `reload' do any work.  */
3334 	if (! added_sets_2
3335 	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
3336 	    && ! i2dest_in_i2src)
3337 	  {
3338 	    regno = REGNO (i2dest);
3339 	    REG_N_SETS (regno)--;
3340 	  }
3341       }
3342 
3343     if (i1 && REG_P (i1dest))
3344       {
3345 	rtx link;
3346 	rtx i1_insn = 0, i1_val = 0, set;
3347 
3348 	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3349 	  if ((set = single_set (XEXP (link, 0))) != 0
3350 	      && rtx_equal_p (i1dest, SET_DEST (set)))
3351 	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
3352 
3353 	record_value_for_reg (i1dest, i1_insn, i1_val);
3354 
3355 	regno = REGNO (i1dest);
3356 	if (! added_sets_1 && ! i1dest_in_i1src)
3357 	  REG_N_SETS (regno)--;
3358       }
3359 
3360     /* Update reg_stat[].nonzero_bits et al for any changes that may have
3361        been made to this insn.  The order of the
3362        set_nonzero_bits_and_sign_copies () calls is important, because
3363        newi2pat can affect the nonzero_bits of newpat.  */
3364     if (newi2pat)
3365       note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
3366     note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
3367 
3368     /* Set new_direct_jump_p if a new return or simple jump instruction
3369        has been created.
3370 
3371        If I3 is now an unconditional jump, ensure that it has a
3372        BARRIER following it since it may have initially been a
3373        conditional jump.  It may also be the last nonnote insn.  */
3374 
3375     if (returnjump_p (i3) || any_uncondjump_p (i3))
3376       {
3377 	*new_direct_jump_p = 1;
3378 	mark_jump_label (PATTERN (i3), i3, 0);
3379 
3380 	if ((temp = next_nonnote_insn (i3)) == NULL_RTX
3381 	    || !BARRIER_P (temp))
3382 	  emit_barrier_after (i3);
3383       }
3384 
3385     if (undobuf.other_insn != NULL_RTX
3386 	&& (returnjump_p (undobuf.other_insn)
3387 	    || any_uncondjump_p (undobuf.other_insn)))
3388       {
3389 	*new_direct_jump_p = 1;
3390 
3391 	if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
3392 	    || !BARRIER_P (temp))
3393 	  emit_barrier_after (undobuf.other_insn);
3394       }
3395 
3396     /* A NOOP jump does not need a barrier, but it does need the CFG
3397        cleaned up.  */
3398     if (GET_CODE (newpat) == SET
3399 	&& SET_SRC (newpat) == pc_rtx
3400 	&& SET_DEST (newpat) == pc_rtx)
3401       *new_direct_jump_p = 1;
3402   }
3403 
3404   combine_successes++;
3405   undo_commit ();
3406 
3407   if (added_links_insn
3408       && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
3409       && INSN_CUID (added_links_insn) < INSN_CUID (i3))
3410     return added_links_insn;
3411   else
3412     return newi2pat ? i2 : i3;
3413 }
3414 
3415 /* Undo all the modifications recorded in undobuf.  */
3416 
3417 static void
3418 undo_all (void)
3419 {
3420   struct undo *undo, *next;
3421 
3422   for (undo = undobuf.undos; undo; undo = next)
3423     {
3424       next = undo->next;
3425       switch (undo->kind)
3426 	{
3427 	case UNDO_RTX:
3428 	  *undo->where.r = undo->old_contents.r;
3429 	  break;
3430 	case UNDO_INT:
3431 	  *undo->where.i = undo->old_contents.i;
3432 	  break;
3433 	case UNDO_MODE:
3434 	  PUT_MODE (*undo->where.r, undo->old_contents.m);
3435 	  break;
3436 	default:
3437 	  gcc_unreachable ();
3438 	}
3439 
3440       undo->next = undobuf.frees;
3441       undobuf.frees = undo;
3442     }
3443 
3444   undobuf.undos = 0;
3445 }
3446 
3447 /* We've committed to accepting the changes we made.  Move all
3448    of the undos to the free list.  */
3449 
3450 static void
3451 undo_commit (void)
3452 {
3453   struct undo *undo, *next;
3454 
3455   for (undo = undobuf.undos; undo; undo = next)
3456     {
3457       next = undo->next;
3458       undo->next = undobuf.frees;
3459       undobuf.frees = undo;
3460     }
3461   undobuf.undos = 0;
3462 }
3463 
3464 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
3465    where we have an arithmetic expression and return that point.  LOC will
3466    be inside INSN.
3467 
3468    try_combine will call this function to see if an insn can be split into
3469    two insns.  */
3470 
3471 static rtx *
3472 find_split_point (rtx *loc, rtx insn)
3473 {
3474   rtx x = *loc;
3475   enum rtx_code code = GET_CODE (x);
3476   rtx *split;
3477   unsigned HOST_WIDE_INT len = 0;
3478   HOST_WIDE_INT pos = 0;
3479   int unsignedp = 0;
3480   rtx inner = NULL_RTX;
3481 
3482   /* First special-case some codes.  */
3483   switch (code)
3484     {
3485     case SUBREG:
3486 #ifdef INSN_SCHEDULING
3487       /* If we are making a paradoxical SUBREG invalid, it becomes a split
3488 	 point.  */
3489       if (MEM_P (SUBREG_REG (x)))
3490 	return loc;
3491 #endif
3492       return find_split_point (&SUBREG_REG (x), insn);
3493 
3494     case MEM:
3495 #ifdef HAVE_lo_sum
3496       /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
3497 	 using LO_SUM and HIGH.  */
3498       if (GET_CODE (XEXP (x, 0)) == CONST
3499 	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3500 	{
3501 	  SUBST (XEXP (x, 0),
3502 		 gen_rtx_LO_SUM (Pmode,
3503 				 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
3504 				 XEXP (x, 0)));
3505 	  return &XEXP (XEXP (x, 0), 0);
3506 	}
3507 #endif
3508 
3509       /* If we have a PLUS whose second operand is a constant and the
3510 	 address is not valid, perhaps we can split it up using
3511 	 the machine-specific way to split large constants.  We use
3512 	 the first pseudo-reg (one of the virtual regs) as a placeholder;
3513 	 it will not remain in the result.  */
3514       if (GET_CODE (XEXP (x, 0)) == PLUS
3515 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3516 	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
3517 	{
3518 	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
3519 	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
3520 				 subst_insn);
3521 
3522 	  /* This should have produced two insns, each of which sets our
3523 	     placeholder.  If the source of the second is a valid address,
3524 	     we can put both sources together and make a split point
3525 	     in the middle.  */
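	  /* Hypothetical example: with a 16-bit offset limit, an address
	     such as (plus R (const_int 0x12345)) might be split by the
	     target into (set TMP (plus R (const_int 0x12000))) followed by
	     (set TMP (plus TMP (const_int 0x345))).  Substituting the first
	     source into the second gives
	     (plus (plus R (const_int 0x12000)) (const_int 0x345)),
	     and the inner PLUS becomes the split point.  */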
3526 
3527 	  if (seq
3528 	      && NEXT_INSN (seq) != NULL_RTX
3529 	      && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
3530 	      && NONJUMP_INSN_P (seq)
3531 	      && GET_CODE (PATTERN (seq)) == SET
3532 	      && SET_DEST (PATTERN (seq)) == reg
3533 	      && ! reg_mentioned_p (reg,
3534 				    SET_SRC (PATTERN (seq)))
3535 	      && NONJUMP_INSN_P (NEXT_INSN (seq))
3536 	      && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
3537 	      && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
3538 	      && memory_address_p (GET_MODE (x),
3539 				   SET_SRC (PATTERN (NEXT_INSN (seq)))))
3540 	    {
3541 	      rtx src1 = SET_SRC (PATTERN (seq));
3542 	      rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
3543 
3544 	      /* Replace the placeholder in SRC2 with SRC1.  If we can
3545 		 find where in SRC2 it was placed, that can become our
3546 		 split point and we can replace this address with SRC2.
3547 		 Just try two obvious places.  */
3548 
3549 	      src2 = replace_rtx (src2, reg, src1);
3550 	      split = 0;
3551 	      if (XEXP (src2, 0) == src1)
3552 		split = &XEXP (src2, 0);
3553 	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
3554 		       && XEXP (XEXP (src2, 0), 0) == src1)
3555 		split = &XEXP (XEXP (src2, 0), 0);
3556 
3557 	      if (split)
3558 		{
3559 		  SUBST (XEXP (x, 0), src2);
3560 		  return split;
3561 		}
3562 	    }
3563 
3564 	  /* If that didn't work, perhaps the first operand is complex and
3565 	     needs to be computed separately, so make a split point there.
3566 	     This will occur on machines that just support REG + CONST
3567 	     and have a constant moved through some previous computation.  */
3568 
3569 	  else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
3570 		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
3571 			 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
3572 	    return &XEXP (XEXP (x, 0), 0);
3573 	}
3574       break;
3575 
3576     case SET:
3577 #ifdef HAVE_cc0
3578       /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3579 	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3580 	 we need to put the operand into a register.  So split at that
3581 	 point.  */
3582 
3583       if (SET_DEST (x) == cc0_rtx
3584 	  && GET_CODE (SET_SRC (x)) != COMPARE
3585 	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
3586 	  && !OBJECT_P (SET_SRC (x))
3587 	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
3588 		&& OBJECT_P (SUBREG_REG (SET_SRC (x)))))
3589 	return &SET_SRC (x);
3590 #endif
3591 
3592       /* See if we can split SET_SRC as it stands.  */
3593       split = find_split_point (&SET_SRC (x), insn);
3594       if (split && split != &SET_SRC (x))
3595 	return split;
3596 
3597       /* See if we can split SET_DEST as it stands.  */
3598       split = find_split_point (&SET_DEST (x), insn);
3599       if (split && split != &SET_DEST (x))
3600 	return split;
3601 
3602       /* See if this is a bitfield assignment with everything constant.  If
3603 	 so, this is an IOR of an AND, so split it into that.  */
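      /* E.g. (operands illustrative, ignoring the BITS_BIG_ENDIAN
	 adjustment of the position): storing constant C into an 8-bit
	 field at bit 4 of register D,
	   (set (zero_extract:SI (reg D) (const_int 8) (const_int 4))
		(const_int C)),
	 becomes
	   (set (reg D) (ior:SI (and:SI (reg D) (const_int ~(0xff << 4)))
				(const_int (C << 4)))).  */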
3604       if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3605 	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
3606 	      <= HOST_BITS_PER_WIDE_INT)
3607 	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3608 	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3609 	  && GET_CODE (SET_SRC (x)) == CONST_INT
3610 	  && ((INTVAL (XEXP (SET_DEST (x), 1))
3611 	       + INTVAL (XEXP (SET_DEST (x), 2)))
3612 	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3613 	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3614 	{
3615 	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3616 	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3617 	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3618 	  rtx dest = XEXP (SET_DEST (x), 0);
3619 	  enum machine_mode mode = GET_MODE (dest);
3620 	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3621 	  rtx or_mask;
3622 
3623 	  if (BITS_BIG_ENDIAN)
3624 	    pos = GET_MODE_BITSIZE (mode) - len - pos;
3625 
3626 	  or_mask = gen_int_mode (src << pos, mode);
3627 	  if (src == mask)
3628 	    SUBST (SET_SRC (x),
3629 		   simplify_gen_binary (IOR, mode, dest, or_mask));
3630 	  else
3631 	    {
3632 	      rtx negmask = gen_int_mode (~(mask << pos), mode);
3633 	      SUBST (SET_SRC (x),
3634 		     simplify_gen_binary (IOR, mode,
3635 					  simplify_gen_binary (AND, mode,
3636 							       dest, negmask),
3637 					  or_mask));
3638 	    }
3639 
3640 	  SUBST (SET_DEST (x), dest);
3641 
3642 	  split = find_split_point (&SET_SRC (x), insn);
3643 	  if (split && split != &SET_SRC (x))
3644 	    return split;
3645 	}
3646 
3647       /* Otherwise, see if this is an operation that we can split into two.
3648 	 If so, try to split that.  */
3649       code = GET_CODE (SET_SRC (x));
3650 
3651       switch (code)
3652 	{
3653 	case AND:
3654 	  /* If we are AND'ing with a large constant that is only a single
3655 	     bit and the result is only being used in a context where we
3656 	     need to know if it is zero or nonzero, replace it with a bit
3657 	     extraction.  This will avoid the large constant, which might
3658 	     have taken more than one insn to make.  If the constant were
3659 	     not a valid argument to the AND but took only one insn to make,
3660 	     this is no worse, but if it took more than one insn, it will
3661 	     be better.  */
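	  /* For instance (values illustrative): if D is set to
	     (and X (const_int 0x8000)) and is only tested against zero,
	     the source can be replaced by a one-bit extraction, roughly
	     (zero_extract X (const_int 1) (const_int 15)).  */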
3662 
3663 	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3664 	      && REG_P (XEXP (SET_SRC (x), 0))
3665 	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3666 	      && REG_P (SET_DEST (x))
3667 	      && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3668 	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3669 	      && XEXP (*split, 0) == SET_DEST (x)
3670 	      && XEXP (*split, 1) == const0_rtx)
3671 	    {
3672 	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3673 						XEXP (SET_SRC (x), 0),
3674 						pos, NULL_RTX, 1, 1, 0, 0);
3675 	      if (extraction != 0)
3676 		{
3677 		  SUBST (SET_SRC (x), extraction);
3678 		  return find_split_point (loc, insn);
3679 		}
3680 	    }
3681 	  break;
3682 
3683 	case NE:
3684 	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3685 	     is known to be on, this can be converted into a NEG of a shift.  */
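	  /* E.g. (illustrative): if only bit 3 of X can be nonzero,
	     (ne X (const_int 0)) becomes
	     (neg (lshiftrt X (const_int 3))).  */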
3686 	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3687 	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3688 	      && 1 <= (pos = exact_log2
3689 		       (nonzero_bits (XEXP (SET_SRC (x), 0),
3690 				      GET_MODE (XEXP (SET_SRC (x), 0))))))
3691 	    {
3692 	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3693 
3694 	      SUBST (SET_SRC (x),
3695 		     gen_rtx_NEG (mode,
3696 				  gen_rtx_LSHIFTRT (mode,
3697 						    XEXP (SET_SRC (x), 0),
3698 						    GEN_INT (pos))));
3699 
3700 	      split = find_split_point (&SET_SRC (x), insn);
3701 	      if (split && split != &SET_SRC (x))
3702 		return split;
3703 	    }
3704 	  break;
3705 
3706 	case SIGN_EXTEND:
3707 	  inner = XEXP (SET_SRC (x), 0);
3708 
3709 	  /* We can't optimize if either mode is a partial integer
3710 	     mode as we don't know how many bits are significant
3711 	     in those modes.  */
3712 	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3713 	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3714 	    break;
3715 
3716 	  pos = 0;
3717 	  len = GET_MODE_BITSIZE (GET_MODE (inner));
3718 	  unsignedp = 0;
3719 	  break;
3720 
3721 	case SIGN_EXTRACT:
3722 	case ZERO_EXTRACT:
3723 	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3724 	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3725 	    {
3726 	      inner = XEXP (SET_SRC (x), 0);
3727 	      len = INTVAL (XEXP (SET_SRC (x), 1));
3728 	      pos = INTVAL (XEXP (SET_SRC (x), 2));
3729 
3730 	      if (BITS_BIG_ENDIAN)
3731 		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3732 	      unsignedp = (code == ZERO_EXTRACT);
3733 	    }
3734 	  break;
3735 
3736 	default:
3737 	  break;
3738 	}
3739 
3740       if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3741 	{
3742 	  enum machine_mode mode = GET_MODE (SET_SRC (x));
3743 
3744 	  /* For unsigned, we have a choice of a shift followed by an
3745 	     AND or two shifts.  Use two shifts for field sizes where the
3746 	     constant might be too large.  We assume here that we can
3747 	     always at least get 8-bit constants in an AND insn, which is
3748 	     true for every current RISC.  */
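	  /* E.g. (illustrative): an unsigned 8-bit field at bit POS of an
	     SImode value becomes (and (lshiftrt X POS) (const_int 255)),
	     while a signed field becomes
	     (ashiftrt (ashift X (32 - LEN - POS)) (32 - LEN)).  */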
3749 
3750 	  if (unsignedp && len <= 8)
3751 	    {
3752 	      SUBST (SET_SRC (x),
3753 		     gen_rtx_AND (mode,
3754 				  gen_rtx_LSHIFTRT
3755 				  (mode, gen_lowpart (mode, inner),
3756 				   GEN_INT (pos)),
3757 				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3758 
3759 	      split = find_split_point (&SET_SRC (x), insn);
3760 	      if (split && split != &SET_SRC (x))
3761 		return split;
3762 	    }
3763 	  else
3764 	    {
3765 	      SUBST (SET_SRC (x),
3766 		     gen_rtx_fmt_ee
3767 		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3768 		      gen_rtx_ASHIFT (mode,
3769 				      gen_lowpart (mode, inner),
3770 				      GEN_INT (GET_MODE_BITSIZE (mode)
3771 					       - len - pos)),
3772 		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3773 
3774 	      split = find_split_point (&SET_SRC (x), insn);
3775 	      if (split && split != &SET_SRC (x))
3776 		return split;
3777 	    }
3778 	}
3779 
3780       /* See if this is a simple operation with a constant as the second
3781 	 operand.  It might be that this constant is out of range and hence
3782 	 could be used as a split point.  */
3783       if (BINARY_P (SET_SRC (x))
3784 	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
3785 	  && (OBJECT_P (XEXP (SET_SRC (x), 0))
3786 	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3787 		  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
3788 	return &XEXP (SET_SRC (x), 1);
3789 
3790       /* Finally, see if this is a simple operation with its first operand
3791 	 not in a register.  The operation might require this operand in a
3792 	 register, so return it as a split point.  We can always do this
3793 	 because if the first operand were another operation, we would have
3794 	 already found it as a split point.  */
3795       if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
3796 	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3797 	return &XEXP (SET_SRC (x), 0);
3798 
3799       return 0;
3800 
3801     case AND:
3802     case IOR:
3803       /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3804 	 it is better to write this as (not (ior A B)) so we can split it.
3805 	 Similarly for IOR.  */
3806       if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3807 	{
3808 	  SUBST (*loc,
3809 		 gen_rtx_NOT (GET_MODE (x),
3810 			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3811 					      GET_MODE (x),
3812 					      XEXP (XEXP (x, 0), 0),
3813 					      XEXP (XEXP (x, 1), 0))));
3814 	  return find_split_point (loc, insn);
3815 	}
3816 
3817       /* Many RISC machines have a large set of logical insns.  If the
3818 	 second operand is a NOT, put it first so we will try to split the
3819 	 other operand first.  */
3820       if (GET_CODE (XEXP (x, 1)) == NOT)
3821 	{
3822 	  rtx tem = XEXP (x, 0);
3823 	  SUBST (XEXP (x, 0), XEXP (x, 1));
3824 	  SUBST (XEXP (x, 1), tem);
3825 	}
3826       break;
3827 
3828     default:
3829       break;
3830     }
3831 
3832   /* Otherwise, select our actions depending on our rtx class.  */
3833   switch (GET_RTX_CLASS (code))
3834     {
3835     case RTX_BITFIELD_OPS:		/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
3836     case RTX_TERNARY:
3837       split = find_split_point (&XEXP (x, 2), insn);
3838       if (split)
3839 	return split;
3840       /* ... fall through ...  */
3841     case RTX_BIN_ARITH:
3842     case RTX_COMM_ARITH:
3843     case RTX_COMPARE:
3844     case RTX_COMM_COMPARE:
3845       split = find_split_point (&XEXP (x, 1), insn);
3846       if (split)
3847 	return split;
3848       /* ... fall through ...  */
3849     case RTX_UNARY:
3850       /* Some machines have (and (shift ...) ...) insns.  If X is not
3851 	 an AND, but XEXP (X, 0) is, use it as our split point.  */
3852       if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3853 	return &XEXP (x, 0);
3854 
3855       split = find_split_point (&XEXP (x, 0), insn);
3856       if (split)
3857 	return split;
3858       return loc;
3859 
3860     default:
3861       /* Otherwise, we don't have a split point.  */
3862       return 0;
3863     }
3864 }
3865 
3866 /* Throughout X, replace FROM with TO, and return the result.
3867    The result is TO if X is FROM;
3868    otherwise the result is X, but its contents may have been modified.
3869    If they were modified, a record was made in undobuf so that
3870    undo_all will (among other things) return X to its original state.
3871 
3872    If the number of changes necessary is too much to record to undo,
3873    the excess changes are not made, so the result is invalid.
3874    The changes already made can still be undone.
3875    undobuf.num_undo is incremented for such changes, so by testing that
3876    the caller can tell whether the result is valid.
3877 
3878    `n_occurrences' is incremented each time FROM is replaced.
3879 
3880    IN_DEST is nonzero if we are processing the SET_DEST of a SET.
3881 
3882    UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
3883    by copying if `n_occurrences' is nonzero.  */
3884 
3885 static rtx
3886 subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
3887 {
3888   enum rtx_code code = GET_CODE (x);
3889   enum machine_mode op0_mode = VOIDmode;
3890   const char *fmt;
3891   int len, i;
3892   rtx new;
3893 
3894 /* Two expressions are equal if they are identical copies of a shared
3895    RTX or if they are both registers with the same register number
3896    and mode.  */
3897 
3898 #define COMBINE_RTX_EQUAL_P(X,Y)			\
3899   ((X) == (Y)						\
3900    || (REG_P (X) && REG_P (Y)	\
3901        && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3902 
3903   if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3904     {
3905       n_occurrences++;
3906       return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3907     }
3908 
3909   /* If X and FROM are the same register but different modes, they will
3910      not have been seen as equal above.  However, flow.c will make a
3911      LOG_LINKS entry for that case.  If we do nothing, we will try to
3912      rerecognize our original insn and, when it succeeds, we will
3913      delete the feeding insn, which is incorrect.
3914 
3915      So force this insn not to match in this (rare) case.  */
3916   if (! in_dest && code == REG && REG_P (from)
3917       && REGNO (x) == REGNO (from))
3918     return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3919 
3920   /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3921      of which may contain things that can be combined.  */
3922   if (code != MEM && code != LO_SUM && OBJECT_P (x))
3923     return x;
3924 
3925   /* It is possible to have a subexpression appear twice in the insn.
3926      Suppose that FROM is a register that appears within TO.
3927      Then, after that subexpression has been scanned once by `subst',
3928      the second time it is scanned, TO may be found.  If we were
3929      to scan TO here, we would find FROM within it and create a
3930      self-referent rtl structure which is completely wrong.  */
3931   if (COMBINE_RTX_EQUAL_P (x, to))
3932     return to;
3933 
3934   /* Parallel asm_operands need special attention because all of the
3935      inputs are shared across the arms.  Furthermore, unsharing the
3936      rtl results in recognition failures.  Failure to handle this case
3937      specially can result in circular rtl.
3938 
3939      Solve this by doing a normal pass across the first entry of the
3940      parallel, and only processing the SET_DESTs of the subsequent
3941      entries.  Ug.  */
3942 
3943   if (code == PARALLEL
3944       && GET_CODE (XVECEXP (x, 0, 0)) == SET
3945       && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3946     {
3947       new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3948 
3949       /* If this substitution failed, this whole thing fails.  */
3950       if (GET_CODE (new) == CLOBBER
3951 	  && XEXP (new, 0) == const0_rtx)
3952 	return new;
3953 
3954       SUBST (XVECEXP (x, 0, 0), new);
3955 
3956       for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3957 	{
3958 	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
3959 
3960 	  if (!REG_P (dest)
3961 	      && GET_CODE (dest) != CC0
3962 	      && GET_CODE (dest) != PC)
3963 	    {
3964 	      new = subst (dest, from, to, 0, unique_copy);
3965 
3966 	      /* If this substitution failed, this whole thing fails.  */
3967 	      if (GET_CODE (new) == CLOBBER
3968 		  && XEXP (new, 0) == const0_rtx)
3969 		return new;
3970 
3971 	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3972 	    }
3973 	}
3974     }
3975   else
3976     {
3977       len = GET_RTX_LENGTH (code);
3978       fmt = GET_RTX_FORMAT (code);
3979 
3980       /* We don't need to process a SET_DEST that is a register, CC0,
3981 	 or PC, so set up to skip this common case.  All other cases
3982 	 where we want to suppress replacing something inside a
3983 	 SET_SRC are handled via the IN_DEST operand.  */
3984       if (code == SET
3985 	  && (REG_P (SET_DEST (x))
3986 	      || GET_CODE (SET_DEST (x)) == CC0
3987 	      || GET_CODE (SET_DEST (x)) == PC))
3988 	fmt = "ie";
3989 
3990       /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3991 	 constant.  */
3992       if (fmt[0] == 'e')
3993 	op0_mode = GET_MODE (XEXP (x, 0));
3994 
3995       for (i = 0; i < len; i++)
3996 	{
3997 	  if (fmt[i] == 'E')
3998 	    {
3999 	      int j;
4000 	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4001 		{
4002 		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
4003 		    {
4004 		      new = (unique_copy && n_occurrences
4005 			     ? copy_rtx (to) : to);
4006 		      n_occurrences++;
4007 		    }
4008 		  else
4009 		    {
4010 		      new = subst (XVECEXP (x, i, j), from, to, 0,
4011 				   unique_copy);
4012 
4013 		      /* If this substitution failed, this whole thing
4014 			 fails.  */
4015 		      if (GET_CODE (new) == CLOBBER
4016 			  && XEXP (new, 0) == const0_rtx)
4017 			return new;
4018 		    }
4019 
4020 		  SUBST (XVECEXP (x, i, j), new);
4021 		}
4022 	    }
4023 	  else if (fmt[i] == 'e')
4024 	    {
4025 	      /* If this is a register being set, ignore it.  */
4026 	      new = XEXP (x, i);
4027 	      if (in_dest
4028 		  && i == 0
4029 		  && (((code == SUBREG || code == ZERO_EXTRACT)
4030 		       && REG_P (new))
4031 		      || code == STRICT_LOW_PART))
4032 		;
4033 
4034 	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
4035 		{
4036 		  /* In general, don't install a subreg involving two
4037 		     modes not tieable.  It can worsen register
4038 		     allocation, and can even make invalid reload
4039 		     insns, since the reg inside may need to be copied
4040 		     from in the outside mode, and that may be invalid
4041 		     if it is an fp reg copied in integer mode.
4042 
4043 		     We allow two exceptions to this: It is valid if
4044 		     it is inside another SUBREG and the mode of that
4045 		     SUBREG and the mode of the inside of TO is
4046 		     tieable and it is valid if X is a SET that copies
4047 		     FROM to CC0.  */
4048 
4049 		  if (GET_CODE (to) == SUBREG
4050 		      && ! MODES_TIEABLE_P (GET_MODE (to),
4051 					    GET_MODE (SUBREG_REG (to)))
4052 		      && ! (code == SUBREG
4053 			    && MODES_TIEABLE_P (GET_MODE (x),
4054 						GET_MODE (SUBREG_REG (to))))
4055 #ifdef HAVE_cc0
4056 		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
4057 #endif
4058 		      )
4059 		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4060 
4061 #ifdef CANNOT_CHANGE_MODE_CLASS
4062 		  if (code == SUBREG
4063 		      && REG_P (to)
4064 		      && REGNO (to) < FIRST_PSEUDO_REGISTER
4065 		      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
4066 						   GET_MODE (to),
4067 						   GET_MODE (x)))
4068 		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4069 #endif
4070 
4071 		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
4072 		  n_occurrences++;
4073 		}
4074 	      else
4075 		/* If we are in a SET_DEST, suppress most cases unless we
4076 		   have gone inside a MEM, in which case we want to
4077 		   simplify the address.  We assume here that things that
4078 		   are actually part of the destination have their inner
4079 		   parts in the first expression.  This is true for SUBREG,
4080 		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
4081 		   things aside from REG and MEM that should appear in a
4082 		   SET_DEST.  */
4083 		new = subst (XEXP (x, i), from, to,
4084 			     (((in_dest
4085 				&& (code == SUBREG || code == STRICT_LOW_PART
4086 				    || code == ZERO_EXTRACT))
4087 			       || code == SET)
4088 			      && i == 0), unique_copy);
4089 
4090 	      /* If we found that we will have to reject this combination,
4091 		 indicate that by returning the CLOBBER ourselves, rather than
4092 		 an expression containing it.  This will speed things up as
4093 		 well as prevent accidents where two CLOBBERs are considered
4094 		 to be equal, thus producing an incorrect simplification.  */
4095 
4096 	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
4097 		return new;
4098 
4099 	      if (GET_CODE (x) == SUBREG
4100 		  && (GET_CODE (new) == CONST_INT
4101 		      || GET_CODE (new) == CONST_DOUBLE))
4102 		{
4103 		  enum machine_mode mode = GET_MODE (x);
4104 
4105 		  x = simplify_subreg (GET_MODE (x), new,
4106 				       GET_MODE (SUBREG_REG (x)),
4107 				       SUBREG_BYTE (x));
4108 		  if (! x)
4109 		    x = gen_rtx_CLOBBER (mode, const0_rtx);
4110 		}
4111 	      else if (GET_CODE (new) == CONST_INT
4112 		       && GET_CODE (x) == ZERO_EXTEND)
4113 		{
4114 		  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
4115 						new, GET_MODE (XEXP (x, 0)));
4116 		  gcc_assert (x);
4117 		}
4118 	      else
4119 		SUBST (XEXP (x, i), new);
4120 	    }
4121 	}
4122     }
4123 
4124   /* Try to simplify X.  If the simplification changed the code, it is likely
4125      that further simplification will help, so loop, but limit the number
4126      of repetitions that will be performed.  */
4127 
4128   for (i = 0; i < 4; i++)
4129     {
4130       /* If X is sufficiently simple, don't bother trying to do anything
4131 	 with it.  */
4132       if (code != CONST_INT && code != REG && code != CLOBBER)
4133 	x = combine_simplify_rtx (x, op0_mode, in_dest);
4134 
4135       if (GET_CODE (x) == code)
4136 	break;
4137 
4138       code = GET_CODE (x);
4139 
4140       /* We no longer know the original mode of operand 0 since we
4141 	 have changed the form of X.  */
4142       op0_mode = VOIDmode;
4143     }
4144 
4145   return x;
4146 }
4147 
4148 /* Simplify X, a piece of RTL.  We just operate on the expression at the
4149    outer level; call `subst' to simplify recursively.  Return the new
4150    expression.
4151 
4152    OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
4153    if we are inside a SET_DEST.  */
4154 
4155 static rtx
4156 combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
4157 {
4158   enum rtx_code code = GET_CODE (x);
4159   enum machine_mode mode = GET_MODE (x);
4160   rtx temp;
4161   int i;
4162 
4163   /* If this is a commutative operation, put a constant last and a complex
4164      expression first.  We don't need to do this for comparisons here.  */
4165   if (COMMUTATIVE_ARITH_P (x)
4166       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
4167     {
4168       temp = XEXP (x, 0);
4169       SUBST (XEXP (x, 0), XEXP (x, 1));
4170       SUBST (XEXP (x, 1), temp);
4171     }
4172 
4173   /* If this is a simple operation applied to an IF_THEN_ELSE, try
4174      applying it to the arms of the IF_THEN_ELSE.  This often simplifies
4175      things.  Check for cases where both arms are testing the same
4176      condition.
4177 
4178      Don't do anything if all operands are very simple.  */
4179 
4180   if ((BINARY_P (x)
4181        && ((!OBJECT_P (XEXP (x, 0))
4182 	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4183 		  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
4184 	   || (!OBJECT_P (XEXP (x, 1))
4185 	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
4186 		     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
4187       || (UNARY_P (x)
4188 	  && (!OBJECT_P (XEXP (x, 0))
4189 	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4190 		     && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
4191     {
4192       rtx cond, true_rtx, false_rtx;
4193 
4194       cond = if_then_else_cond (x, &true_rtx, &false_rtx);
4195       if (cond != 0
4196 	  /* If everything is a comparison, what we have is highly unlikely
4197 	     to be simpler, so don't use it.  */
4198 	  && ! (COMPARISON_P (x)
4199 		&& (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
4200 	{
4201 	  rtx cop1 = const0_rtx;
4202 	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
4203 
4204 	  if (cond_code == NE && COMPARISON_P (cond))
4205 	    return x;
4206 
4207 	  /* Simplify the alternative arms; this may collapse the true and
4208 	     false arms to store-flag values.  Be careful to use copy_rtx
4209 	     here since true_rtx or false_rtx might share RTL with x as a
4210 	     result of the if_then_else_cond call above.  */
4211 	  true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
4212 	  false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
4213 
4214 	  /* If true_rtx and false_rtx are not general_operands, an if_then_else
4215 	     is unlikely to be simpler.  */
4216 	  if (general_operand (true_rtx, VOIDmode)
4217 	      && general_operand (false_rtx, VOIDmode))
4218 	    {
4219 	      enum rtx_code reversed;
4220 
4221 	      /* Restarting if we generate a store-flag expression will cause
4222 		 us to loop.  Just drop through in this case.  */
4223 
4224 	      /* If the result values are STORE_FLAG_VALUE and zero, we can
4225 		 just make the comparison operation.  */
4226 	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
4227 		x = simplify_gen_relational (cond_code, mode, VOIDmode,
4228 					     cond, cop1);
4229 	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
4230 		       && ((reversed = reversed_comparison_code_parts
4231 					(cond_code, cond, cop1, NULL))
4232 			   != UNKNOWN))
4233 		x = simplify_gen_relational (reversed, mode, VOIDmode,
4234 					     cond, cop1);
4235 
4236 	      /* Likewise, we can make the negate of a comparison operation
4237 		 if the result values are - STORE_FLAG_VALUE and zero.  */
4238 	      else if (GET_CODE (true_rtx) == CONST_INT
4239 		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
4240 		       && false_rtx == const0_rtx)
4241 		x = simplify_gen_unary (NEG, mode,
4242 					simplify_gen_relational (cond_code,
4243 								 mode, VOIDmode,
4244 								 cond, cop1),
4245 					mode);
4246 	      else if (GET_CODE (false_rtx) == CONST_INT
4247 		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
4248 		       && true_rtx == const0_rtx
4249 		       && ((reversed = reversed_comparison_code_parts
4250 					(cond_code, cond, cop1, NULL))
4251 			   != UNKNOWN))
4252 		x = simplify_gen_unary (NEG, mode,
4253 					simplify_gen_relational (reversed,
4254 								 mode, VOIDmode,
4255 								 cond, cop1),
4256 					mode);
4257 	      else
4258 		return gen_rtx_IF_THEN_ELSE (mode,
4259 					     simplify_gen_relational (cond_code,
4260 								      mode,
4261 								      VOIDmode,
4262 								      cond,
4263 								      cop1),
4264 					     true_rtx, false_rtx);
4265 
4266 	      code = GET_CODE (x);
4267 	      op0_mode = VOIDmode;
4268 	    }
4269 	}
4270     }
4271 
4272   /* Try to fold this expression in case we have constants that weren't
4273      present before.  */
4274   temp = 0;
4275   switch (GET_RTX_CLASS (code))
4276     {
4277     case RTX_UNARY:
4278       if (op0_mode == VOIDmode)
4279 	op0_mode = GET_MODE (XEXP (x, 0));
4280       temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
4281       break;
4282     case RTX_COMPARE:
4283     case RTX_COMM_COMPARE:
4284       {
4285 	enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
4286 	if (cmp_mode == VOIDmode)
4287 	  {
4288 	    cmp_mode = GET_MODE (XEXP (x, 1));
4289 	    if (cmp_mode == VOIDmode)
4290 	      cmp_mode = op0_mode;
4291 	  }
4292 	temp = simplify_relational_operation (code, mode, cmp_mode,
4293 					      XEXP (x, 0), XEXP (x, 1));
4294       }
4295       break;
4296     case RTX_COMM_ARITH:
4297     case RTX_BIN_ARITH:
4298       temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
4299       break;
4300     case RTX_BITFIELD_OPS:
4301     case RTX_TERNARY:
4302       temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
4303 					 XEXP (x, 1), XEXP (x, 2));
4304       break;
4305     default:
4306       break;
4307     }
4308 
4309   if (temp)
4310     {
4311       x = temp;
4312       code = GET_CODE (temp);
4313       op0_mode = VOIDmode;
4314       mode = GET_MODE (temp);
4315     }
4316 
4317   /* First see if we can apply the inverse distributive law.  */
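  /* Roughly: forms such as (ior (and A B) (and A C)) may be rewritten
     as (and A (ior B C)) when that is simpler.  */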
4318   if (code == PLUS || code == MINUS
4319       || code == AND || code == IOR || code == XOR)
4320     {
4321       x = apply_distributive_law (x);
4322       code = GET_CODE (x);
4323       op0_mode = VOIDmode;
4324     }
4325 
4326   /* If CODE is an associative operation not otherwise handled, see if we
4327      can associate some operands.  This can win if they are constants or
4328      if they are logically related (i.e. (a & b) & a).  */
4329   if ((code == PLUS || code == MINUS || code == MULT || code == DIV
4330        || code == AND || code == IOR || code == XOR
4331        || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
4332       && ((INTEGRAL_MODE_P (mode) && code != DIV)
4333 	  || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
4334     {
4335       if (GET_CODE (XEXP (x, 0)) == code)
4336 	{
4337 	  rtx other = XEXP (XEXP (x, 0), 0);
4338 	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
4339 	  rtx inner_op1 = XEXP (x, 1);
4340 	  rtx inner;
4341 
4342 	  /* Make sure we pass the constant operand if any as the second
4343 	     one if this is a commutative operation.  */
4344 	  if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
4345 	    {
4346 	      rtx tem = inner_op0;
4347 	      inner_op0 = inner_op1;
4348 	      inner_op1 = tem;
4349 	    }
4350 	  inner = simplify_binary_operation (code == MINUS ? PLUS
4351 					     : code == DIV ? MULT
4352 					     : code,
4353 					     mode, inner_op0, inner_op1);
4354 
4355 	  /* For commutative operations, try the other pair if that one
4356 	     didn't simplify.  */
4357 	  if (inner == 0 && COMMUTATIVE_ARITH_P (x))
4358 	    {
4359 	      other = XEXP (XEXP (x, 0), 1);
4360 	      inner = simplify_binary_operation (code, mode,
4361 						 XEXP (XEXP (x, 0), 0),
4362 						 XEXP (x, 1));
4363 	    }
4364 
4365 	  if (inner)
4366 	    return simplify_gen_binary (code, mode, other, inner);
4367 	}
4368     }
4369 
4370   /* A little bit of algebraic simplification here.  */
4371   switch (code)
4372     {
4373     case MEM:
4374       /* Ensure that our address has any ASHIFTs converted to MULT in case
4375 	 address-recognizing predicates are called later.  */
4376       temp = make_compound_operation (XEXP (x, 0), MEM);
4377       SUBST (XEXP (x, 0), temp);
4378       break;
4379 
4380     case SUBREG:
4381       if (op0_mode == VOIDmode)
4382 	op0_mode = GET_MODE (SUBREG_REG (x));
4383 
4384       /* See if this can be moved to simplify_subreg.  */
4385       if (CONSTANT_P (SUBREG_REG (x))
4386 	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
4387 	     /* Don't call gen_lowpart if the inner mode
4388 		is VOIDmode and we cannot simplify it, as SUBREG without
4389 		inner mode is invalid.  */
4390 	  && (GET_MODE (SUBREG_REG (x)) != VOIDmode
4391 	      || gen_lowpart_common (mode, SUBREG_REG (x))))
4392 	return gen_lowpart (mode, SUBREG_REG (x));
4393 
4394       if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
4395 	break;
4396       {
4397 	rtx temp;
4398 	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
4399 				SUBREG_BYTE (x));
4400 	if (temp)
4401 	  return temp;
4402       }
4403 
4404       /* Don't change the mode of the MEM if that would change the meaning
4405 	 of the address.  */
4406       if (MEM_P (SUBREG_REG (x))
4407 	  && (MEM_VOLATILE_P (SUBREG_REG (x))
4408 	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
4409 	return gen_rtx_CLOBBER (mode, const0_rtx);
4410 
4411       /* Note that we cannot do any narrowing for non-constants since
4412 	 we might have been counting on using the fact that some bits were
4413 	 zero.  We now do this in the SET.  */
4414 
4415       break;
4416 
4417     case NEG:
4418       temp = expand_compound_operation (XEXP (x, 0));
4419 
4420       /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4421 	 replaced by (lshiftrt X C).  This will convert
4422 	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
4423 
4424       if (GET_CODE (temp) == ASHIFTRT
4425 	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
4426 	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
4427 	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
4428 				     INTVAL (XEXP (temp, 1)));
4429 
4430       /* If X has only a single bit that might be nonzero, say, bit I, convert
4431 	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4432 	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
4433 	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
4434 	 or a SUBREG of one since we'd be making the expression more
4435 	 complex if it was just a register.  */
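      /* For example, if only bit 3 of X can be nonzero in SImode (I == 3),
	 (ashift X 28) puts that bit in the sign position and the following
	 (ashiftrt ... 28) smears it, giving 0 or -8, i.e. (neg X).  */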
4436 
4437       if (!REG_P (temp)
4438 	  && ! (GET_CODE (temp) == SUBREG
4439 		&& REG_P (SUBREG_REG (temp)))
4440 	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
4441 	{
4442 	  rtx temp1 = simplify_shift_const
4443 	    (NULL_RTX, ASHIFTRT, mode,
4444 	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
4445 				   GET_MODE_BITSIZE (mode) - 1 - i),
4446 	     GET_MODE_BITSIZE (mode) - 1 - i);
4447 
4448 	  /* If all we did was surround TEMP with the two shifts, we
4449 	     haven't improved anything, so don't use it.  Otherwise,
4450 	     we are better off with TEMP1.  */
4451 	  if (GET_CODE (temp1) != ASHIFTRT
4452 	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4453 	      || XEXP (XEXP (temp1, 0), 0) != temp)
4454 	    return temp1;
4455 	}
4456       break;
4457 
4458     case TRUNCATE:
4459       /* We can't handle truncation to a partial integer mode here
4460 	 because we don't know the real bitsize of the partial
4461 	 integer mode.  */
4462       if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4463 	break;
4464 
4465       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4466 	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4467 				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4468 	SUBST (XEXP (x, 0),
4469 	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4470 			      GET_MODE_MASK (mode), 0));
4471 
4472       /* Similarly to what we do in simplify-rtx.c, a truncate of a register
4473 	 whose value is a comparison can be replaced with a subreg if
4474 	 STORE_FLAG_VALUE permits.  */
4475       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4476 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4477 	  && (temp = get_last_value (XEXP (x, 0)))
4478 	  && COMPARISON_P (temp))
4479 	return gen_lowpart (mode, XEXP (x, 0));
4480       break;
4481 
4482 #ifdef HAVE_cc0
4483     case COMPARE:
4484       /* Convert (compare FOO (const_int 0)) to FOO.  When we are not
4485 	 using cc0, we want to leave it as a COMPARE instead, so we can
4486 	 distinguish it from a register-register copy.  */
4487       if (XEXP (x, 1) == const0_rtx)
4488 	return XEXP (x, 0);
4489 
4490       /* x - 0 is the same as x unless x's mode has signed zeros and
4491 	 allows rounding towards -infinity.  Under those conditions,
4492 	 0 - 0 is -0.  */
4493       if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
4494 	    && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
4495 	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4496 	return XEXP (x, 0);
4497       break;
4498 #endif
4499 
4500     case CONST:
4501       /* (const (const X)) can become (const X).  Do it this way rather than
4502 	 returning the inner CONST since CONST can be shared with a
4503 	 REG_EQUAL note.  */
4504       if (GET_CODE (XEXP (x, 0)) == CONST)
4505 	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4506       break;
4507 
4508 #ifdef HAVE_lo_sum
4509     case LO_SUM:
4510       /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
4511 	 can add in an offset.  find_split_point will split this address up
4512 	 again if it doesn't match.  */
4513       if (GET_CODE (XEXP (x, 0)) == HIGH
4514 	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4515 	return XEXP (x, 1);
4516       break;
4517 #endif
4518 
4519     case PLUS:
4520       /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4521 	 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4522 	 bit-field and can be replaced by either a sign_extend or a
4523 	 sign_extract.  The `and' may be a zero_extend and the two
4524 	 <c>, -<c> constants may be reversed.  */
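      /* A concrete instance, for i == 7 in SImode:
	 (plus (xor (and X 255) 128) -128) sign-extends the low byte of X
	 and is rewritten as (ashiftrt (ashift X 24) 24).  */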
4525       if (GET_CODE (XEXP (x, 0)) == XOR
4526 	  && GET_CODE (XEXP (x, 1)) == CONST_INT
4527 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4528 	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4529 	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4530 	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4531 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4532 	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4533 	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4534 	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4535 		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4536 	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4537 		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4538 		      == (unsigned int) i + 1))))
4539 	return simplify_shift_const
4540 	  (NULL_RTX, ASHIFTRT, mode,
4541 	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4542 				 XEXP (XEXP (XEXP (x, 0), 0), 0),
4543 				 GET_MODE_BITSIZE (mode) - (i + 1)),
4544 	   GET_MODE_BITSIZE (mode) - (i + 1));
4545 
4546       /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4547 	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4548 	 the bitsize of the mode - 1.  This allows simplification of
4549 	 "a = (b & 8) == 0;"  */
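      /* For instance, if X is known to be 0 or 1 in SImode, X - 1 is
	 0 or -1; (xor X 1) flips the low bit, the ashift by 31 moves it
	 to the sign position, and the ashiftrt by 31 smears it, giving
	 -1 for X == 0 and 0 for X == 1, as required.  */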
4550       if (XEXP (x, 1) == constm1_rtx
4551 	  && !REG_P (XEXP (x, 0))
4552 	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4553 		&& REG_P (SUBREG_REG (XEXP (x, 0))))
4554 	  && nonzero_bits (XEXP (x, 0), mode) == 1)
4555 	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4556 	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4557 				 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4558 				 GET_MODE_BITSIZE (mode) - 1),
4559 	   GET_MODE_BITSIZE (mode) - 1);
4560 
4561       /* If we are adding two things that have no bits in common, convert
4562 	 the addition into an IOR.  This will often be further simplified,
4563 	 for example in cases like ((a & 1) + (a & 2)), which can
4564 	 become a & 3.  */
4565 
4566       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4567 	  && (nonzero_bits (XEXP (x, 0), mode)
4568 	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
4569 	{
4570 	  /* Try to simplify the expression further.  */
4571 	  rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4572 	  temp = combine_simplify_rtx (tor, mode, in_dest);
4573 
4574 	  /* If we could, great.  If not, do not go ahead with the IOR
4575 	     replacement, since PLUS appears in many special purpose
4576 	     address arithmetic instructions.  */
4577 	  if (GET_CODE (temp) != CLOBBER && temp != tor)
4578 	    return temp;
4579 	}
4580       break;
4581 
4582     case MINUS:
4583       /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4584 	 (and <foo> (const_int pow2-1))  */
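      /* E.g. (minus X (and X (const_int -8))) leaves only the low three
	 bits of X, so it becomes (and X (const_int 7)).  */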
4585       if (GET_CODE (XEXP (x, 1)) == AND
4586 	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4587 	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4588 	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4589 	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4590 				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4591       break;
4592 
4593     case MULT:
4594       /* If we have (mult (plus A B) C), apply the distributive law and then
4595 	 the inverse distributive law to see if things simplify.  This
4596 	 occurs mostly in addresses, often when unrolling loops.  */
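      /* For example, (mult (plus A (const_int 4)) (const_int 8)) distributes
	 to (plus (mult A (const_int 8)) (const_int 32)); the inverse
	 distributive law is then tried in case terms recombine.  */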
4597 
4598       if (GET_CODE (XEXP (x, 0)) == PLUS)
4599 	{
4600 	  rtx result = distribute_and_simplify_rtx (x, 0);
4601 	  if (result)
4602 	    return result;
4603 	}
4604 
4605       /* Try to simplify a*(b/c) as (a*b)/c.  */
4606       if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4607 	  && GET_CODE (XEXP (x, 0)) == DIV)
4608 	{
4609 	  rtx tem = simplify_binary_operation (MULT, mode,
4610 					       XEXP (XEXP (x, 0), 0),
4611 					       XEXP (x, 1));
4612 	  if (tem)
4613 	    return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4614 	}
4615       break;
4616 
4617     case UDIV:
4618       /* If this is a divide by a power of two, treat it as a shift if
4619 	 its first operand is a shift.  */
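      /* E.g. (udiv (ashift X 3) (const_int 4)) is rewritten as a logical
	 right shift of (ashift X 3) by 2, which the shift simplifier can
	 then fold further.  */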
4620       if (GET_CODE (XEXP (x, 1)) == CONST_INT
4621 	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4622 	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
4623 	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4624 	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4625 	      || GET_CODE (XEXP (x, 0)) == ROTATE
4626 	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
4627 	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4628       break;
4629 
4630     case EQ:  case NE:
4631     case GT:  case GTU:  case GE:  case GEU:
4632     case LT:  case LTU:  case LE:  case LEU:
4633     case UNEQ:  case LTGT:
4634     case UNGT:  case UNGE:
4635     case UNLT:  case UNLE:
4636     case UNORDERED: case ORDERED:
4637       /* If the first operand is a condition code, we can't do anything
4638 	 with it.  */
4639       if (GET_CODE (XEXP (x, 0)) == COMPARE
4640 	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4641 	      && ! CC0_P (XEXP (x, 0))))
4642 	{
4643 	  rtx op0 = XEXP (x, 0);
4644 	  rtx op1 = XEXP (x, 1);
4645 	  enum rtx_code new_code;
4646 
4647 	  if (GET_CODE (op0) == COMPARE)
4648 	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4649 
4650 	  /* Simplify our comparison, if possible.  */
4651 	  new_code = simplify_comparison (code, &op0, &op1);
4652 
4653 	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4654 	     if only the low-order bit is possibly nonzero in X (such as when
4655 	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4656 	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4657 	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
4658 	     (plus X 1).
4659 
4660 	     Remove any ZERO_EXTRACT we made when thinking this was a
4661 	     comparison.  It may now be simpler to use, e.g., an AND.  If a
4662 	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
4663 	     the call to make_compound_operation in the SET case.  */
4664 
4665 	  if (STORE_FLAG_VALUE == 1
4666 	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4667 	      && op1 == const0_rtx
4668 	      && mode == GET_MODE (op0)
4669 	      && nonzero_bits (op0, mode) == 1)
4670 	    return gen_lowpart (mode,
4671 				expand_compound_operation (op0));
4672 
4673 	  else if (STORE_FLAG_VALUE == 1
4674 		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4675 		   && op1 == const0_rtx
4676 		   && mode == GET_MODE (op0)
4677 		   && (num_sign_bit_copies (op0, mode)
4678 		       == GET_MODE_BITSIZE (mode)))
4679 	    {
4680 	      op0 = expand_compound_operation (op0);
4681 	      return simplify_gen_unary (NEG, mode,
4682 					 gen_lowpart (mode, op0),
4683 					 mode);
4684 	    }
4685 
4686 	  else if (STORE_FLAG_VALUE == 1
4687 		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4688 		   && op1 == const0_rtx
4689 		   && mode == GET_MODE (op0)
4690 		   && nonzero_bits (op0, mode) == 1)
4691 	    {
4692 	      op0 = expand_compound_operation (op0);
4693 	      return simplify_gen_binary (XOR, mode,
4694 					  gen_lowpart (mode, op0),
4695 					  const1_rtx);
4696 	    }
4697 
4698 	  else if (STORE_FLAG_VALUE == 1
4699 		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4700 		   && op1 == const0_rtx
4701 		   && mode == GET_MODE (op0)
4702 		   && (num_sign_bit_copies (op0, mode)
4703 		       == GET_MODE_BITSIZE (mode)))
4704 	    {
4705 	      op0 = expand_compound_operation (op0);
4706 	      return plus_constant (gen_lowpart (mode, op0), 1);
4707 	    }
4708 
4709 	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
4710 	     those above.  */
4711 	  if (STORE_FLAG_VALUE == -1
4712 	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4713 	      && op1 == const0_rtx
4714 	      && (num_sign_bit_copies (op0, mode)
4715 		  == GET_MODE_BITSIZE (mode)))
4716 	    return gen_lowpart (mode,
4717 				expand_compound_operation (op0));
4718 
4719 	  else if (STORE_FLAG_VALUE == -1
4720 		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4721 		   && op1 == const0_rtx
4722 		   && mode == GET_MODE (op0)
4723 		   && nonzero_bits (op0, mode) == 1)
4724 	    {
4725 	      op0 = expand_compound_operation (op0);
4726 	      return simplify_gen_unary (NEG, mode,
4727 					 gen_lowpart (mode, op0),
4728 					 mode);
4729 	    }
4730 
4731 	  else if (STORE_FLAG_VALUE == -1
4732 		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4733 		   && op1 == const0_rtx
4734 		   && mode == GET_MODE (op0)
4735 		   && (num_sign_bit_copies (op0, mode)
4736 		       == GET_MODE_BITSIZE (mode)))
4737 	    {
4738 	      op0 = expand_compound_operation (op0);
4739 	      return simplify_gen_unary (NOT, mode,
4740 					 gen_lowpart (mode, op0),
4741 					 mode);
4742 	    }
4743 
4744 	  /* If X is 0/1, (eq X 0) is X-1.  */
4745 	  else if (STORE_FLAG_VALUE == -1
4746 		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4747 		   && op1 == const0_rtx
4748 		   && mode == GET_MODE (op0)
4749 		   && nonzero_bits (op0, mode) == 1)
4750 	    {
4751 	      op0 = expand_compound_operation (op0);
4752 	      return plus_constant (gen_lowpart (mode, op0), -1);
4753 	    }
4754 
4755 	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4756 	     one bit that might be nonzero, we can convert (ne x 0) to
4757 	     (ashift x c) where C puts the bit in the sign bit.  Remove any
4758 	     AND with STORE_FLAG_VALUE when we are done, since we are only
4759 	     going to test the sign bit.  */
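	  /* For instance, if STORE_FLAG_VALUE is the SImode sign bit and
	     only bit 3 of X can be nonzero, (ne X 0) becomes (ashift X 28),
	     which moves that bit into the sign position.  */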
4760 	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4761 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4762 	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4763 		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4764 	      && op1 == const0_rtx
4765 	      && mode == GET_MODE (op0)
4766 	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4767 	    {
4768 	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4769 					expand_compound_operation (op0),
4770 					GET_MODE_BITSIZE (mode) - 1 - i);
4771 	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4772 		return XEXP (x, 0);
4773 	      else
4774 		return x;
4775 	    }
4776 
4777 	  /* If the code changed, return a whole new comparison.  */
4778 	  if (new_code != code)
4779 	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4780 
4781 	  /* Otherwise, keep this operation, but maybe change its operands.
4782 	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4783 	  SUBST (XEXP (x, 0), op0);
4784 	  SUBST (XEXP (x, 1), op1);
4785 	}
4786       break;
4787 
4788     case IF_THEN_ELSE:
4789       return simplify_if_then_else (x);
4790 
4791     case ZERO_EXTRACT:
4792     case SIGN_EXTRACT:
4793     case ZERO_EXTEND:
4794     case SIGN_EXTEND:
4795       /* If we are processing SET_DEST, we are done.  */
4796       if (in_dest)
4797 	return x;
4798 
4799       return expand_compound_operation (x);
4800 
4801     case SET:
4802       return simplify_set (x);
4803 
4804     case AND:
4805     case IOR:
4806       return simplify_logical (x);
4807 
4808     case ASHIFT:
4809     case LSHIFTRT:
4810     case ASHIFTRT:
4811     case ROTATE:
4812     case ROTATERT:
4813       /* If this is a shift by a constant amount, simplify it.  */
4814       if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4815 	return simplify_shift_const (x, code, mode, XEXP (x, 0),
4816 				     INTVAL (XEXP (x, 1)));
4817 
4818       else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
4819 	SUBST (XEXP (x, 1),
4820 	       force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4821 			      ((HOST_WIDE_INT) 1
4822 			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4823 			      - 1,
4824 			      0));
4825       break;
4826 
4827     default:
4828       break;
4829     }
4830 
4831   return x;
4832 }
4833 
4834 /* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4835 
4836 static rtx
4837 simplify_if_then_else (rtx x)
4838 {
4839   enum machine_mode mode = GET_MODE (x);
4840   rtx cond = XEXP (x, 0);
4841   rtx true_rtx = XEXP (x, 1);
4842   rtx false_rtx = XEXP (x, 2);
4843   enum rtx_code true_code = GET_CODE (cond);
4844   int comparison_p = COMPARISON_P (cond);
4845   rtx temp;
4846   int i;
4847   enum rtx_code false_code;
4848   rtx reversed;
4849 
4850   /* Simplify storing of the truth value.  */
4851   if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4852     return simplify_gen_relational (true_code, mode, VOIDmode,
4853 				    XEXP (cond, 0), XEXP (cond, 1));
4854 
4855   /* Also when the truth value has to be reversed.  */
4856   if (comparison_p
4857       && true_rtx == const0_rtx && false_rtx == const_true_rtx
4858       && (reversed = reversed_comparison (cond, mode)))
4859     return reversed;
4860 
4861   /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4862      in it is being compared against certain values.  Get the true and false
4863      comparisons and see if that says anything about the value of each arm.  */
4864 
4865   if (comparison_p
4866       && ((false_code = reversed_comparison_code (cond, NULL))
4867 	  != UNKNOWN)
4868       && REG_P (XEXP (cond, 0)))
4869     {
4870       HOST_WIDE_INT nzb;
4871       rtx from = XEXP (cond, 0);
4872       rtx true_val = XEXP (cond, 1);
4873       rtx false_val = true_val;
4874       int swapped = 0;
4875 
4876       /* If FALSE_CODE is EQ, swap the codes and arms.  */
4877 
4878       if (false_code == EQ)
4879 	{
4880 	  swapped = 1, true_code = EQ, false_code = NE;
4881 	  temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4882 	}
4883 
4884       /* If we are comparing against zero and the expression being tested has
4885 	 only a single bit that might be nonzero, that is its value when it is
4886 	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
4887 
4888       if (true_code == EQ && true_val == const0_rtx
4889 	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4890 	false_code = EQ, false_val = GEN_INT (nzb);
4891       else if (true_code == EQ && true_val == const0_rtx
4892 	       && (num_sign_bit_copies (from, GET_MODE (from))
4893 		   == GET_MODE_BITSIZE (GET_MODE (from))))
4894 	false_code = EQ, false_val = constm1_rtx;
4895 
4896       /* Now simplify an arm if we know the value of the register in the
4897 	 branch and it is used in the arm.  Be careful due to the potential
4898 	 of locally-shared RTL.  */
4899 
4900       if (reg_mentioned_p (from, true_rtx))
4901 	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4902 				      from, true_val),
4903 		      pc_rtx, pc_rtx, 0, 0);
4904       if (reg_mentioned_p (from, false_rtx))
4905 	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4906 				   from, false_val),
4907 		       pc_rtx, pc_rtx, 0, 0);
4908 
4909       SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4910       SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4911 
4912       true_rtx = XEXP (x, 1);
4913       false_rtx = XEXP (x, 2);
4914       true_code = GET_CODE (cond);
4915     }
4916 
4917   /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4918      reversed, do so to avoid needing two sets of patterns for
4919      subtract-and-branch insns.  Similarly if we have a constant in the true
4920      arm, the false arm is the same as the first operand of the comparison, or
4921      the false arm is more complicated than the true arm.  */
4922 
4923   if (comparison_p
4924       && reversed_comparison_code (cond, NULL) != UNKNOWN
4925       && (true_rtx == pc_rtx
4926 	  || (CONSTANT_P (true_rtx)
4927 	      && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4928 	  || true_rtx == const0_rtx
4929 	  || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
4930 	  || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
4931 	      && !OBJECT_P (false_rtx))
4932 	  || reg_mentioned_p (true_rtx, false_rtx)
4933 	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4934     {
4935       true_code = reversed_comparison_code (cond, NULL);
4936       SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
4937       SUBST (XEXP (x, 1), false_rtx);
4938       SUBST (XEXP (x, 2), true_rtx);
4939 
4940       temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4941       cond = XEXP (x, 0);
4942 
4943       /* It is possible that the conditional has been simplified out.  */
4944       true_code = GET_CODE (cond);
4945       comparison_p = COMPARISON_P (cond);
4946     }
4947 
4948   /* If the two arms are identical, we don't need the comparison.  */
4949 
4950   if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4951     return true_rtx;
4952 
4953   /* Convert a == b ? b : a to "a".  */
4954   if (true_code == EQ && ! side_effects_p (cond)
4955       && !HONOR_NANS (mode)
4956       && rtx_equal_p (XEXP (cond, 0), false_rtx)
4957       && rtx_equal_p (XEXP (cond, 1), true_rtx))
4958     return false_rtx;
4959   else if (true_code == NE && ! side_effects_p (cond)
4960 	   && !HONOR_NANS (mode)
4961 	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
4962 	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
4963     return true_rtx;
4964 
4965   /* Look for cases where we have (abs x) or (neg (abs X)).  */
4966 
4967   if (GET_MODE_CLASS (mode) == MODE_INT
4968       && GET_CODE (false_rtx) == NEG
4969       && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4970       && comparison_p
4971       && rtx_equal_p (true_rtx, XEXP (cond, 0))
4972       && ! side_effects_p (true_rtx))
4973     switch (true_code)
4974       {
4975       case GT:
4976       case GE:
4977 	return simplify_gen_unary (ABS, mode, true_rtx, mode);
4978       case LT:
4979       case LE:
4980 	return
4981 	  simplify_gen_unary (NEG, mode,
4982 			      simplify_gen_unary (ABS, mode, true_rtx, mode),
4983 			      mode);
4984       default:
4985 	break;
4986       }
4987 
4988   /* Look for MIN or MAX.  */
4989 
4990   if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4991       && comparison_p
4992       && rtx_equal_p (XEXP (cond, 0), true_rtx)
4993       && rtx_equal_p (XEXP (cond, 1), false_rtx)
4994       && ! side_effects_p (cond))
4995     switch (true_code)
4996       {
4997       case GE:
4998       case GT:
4999 	return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
5000       case LE:
5001       case LT:
5002 	return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
5003       case GEU:
5004       case GTU:
5005 	return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
5006       case LEU:
5007       case LTU:
5008 	return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
5009       default:
5010 	break;
5011       }
5012 
5013   /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
5014      second operand is zero, this can be done as (OP Z (mult COND C2)) where
5015      C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
5016      SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
5017      We can do this kind of thing in some cases when STORE_FLAG_VALUE is
5018      neither 1 nor -1, but it isn't worth checking for.  */
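  /* For example, with STORE_FLAG_VALUE == 1,
     (if_then_else (ne A B) (plus Z (const_int 4)) Z) can be rewritten as
     (plus Z (mult (ne A B) (const_int 4))): the multiply is 4 when the
     condition holds and 0 otherwise, and adding 0 is an identity.  */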
5019 
5020   if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
5021       && comparison_p
5022       && GET_MODE_CLASS (mode) == MODE_INT
5023       && ! side_effects_p (x))
5024     {
5025       rtx t = make_compound_operation (true_rtx, SET);
5026       rtx f = make_compound_operation (false_rtx, SET);
5027       rtx cond_op0 = XEXP (cond, 0);
5028       rtx cond_op1 = XEXP (cond, 1);
5029       enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
5030       enum machine_mode m = mode;
5031       rtx z = 0, c1 = NULL_RTX;
5032 
5033       if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
5034 	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
5035 	   || GET_CODE (t) == ASHIFT
5036 	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
5037 	  && rtx_equal_p (XEXP (t, 0), f))
5038 	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
5039 
5040       /* If an identity-zero op is commutative, check whether there
5041 	 would be a match if we swapped the operands.  */
5042       else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
5043 		|| GET_CODE (t) == XOR)
5044 	       && rtx_equal_p (XEXP (t, 1), f))
5045 	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
5046       else if (GET_CODE (t) == SIGN_EXTEND
5047 	       && (GET_CODE (XEXP (t, 0)) == PLUS
5048 		   || GET_CODE (XEXP (t, 0)) == MINUS
5049 		   || GET_CODE (XEXP (t, 0)) == IOR
5050 		   || GET_CODE (XEXP (t, 0)) == XOR
5051 		   || GET_CODE (XEXP (t, 0)) == ASHIFT
5052 		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5053 		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5054 	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5055 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5056 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5057 	       && (num_sign_bit_copies (f, GET_MODE (f))
5058 		   > (unsigned int)
5059 		     (GET_MODE_BITSIZE (mode)
5060 		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
5061 	{
5062 	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5063 	  extend_op = SIGN_EXTEND;
5064 	  m = GET_MODE (XEXP (t, 0));
5065 	}
5066       else if (GET_CODE (t) == SIGN_EXTEND
5067 	       && (GET_CODE (XEXP (t, 0)) == PLUS
5068 		   || GET_CODE (XEXP (t, 0)) == IOR
5069 		   || GET_CODE (XEXP (t, 0)) == XOR)
5070 	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5071 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5072 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5073 	       && (num_sign_bit_copies (f, GET_MODE (f))
5074 		   > (unsigned int)
5075 		     (GET_MODE_BITSIZE (mode)
5076 		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
5077 	{
5078 	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5079 	  extend_op = SIGN_EXTEND;
5080 	  m = GET_MODE (XEXP (t, 0));
5081 	}
5082       else if (GET_CODE (t) == ZERO_EXTEND
5083 	       && (GET_CODE (XEXP (t, 0)) == PLUS
5084 		   || GET_CODE (XEXP (t, 0)) == MINUS
5085 		   || GET_CODE (XEXP (t, 0)) == IOR
5086 		   || GET_CODE (XEXP (t, 0)) == XOR
5087 		   || GET_CODE (XEXP (t, 0)) == ASHIFT
5088 		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5089 		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5090 	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5091 	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5092 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5093 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5094 	       && ((nonzero_bits (f, GET_MODE (f))
5095 		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
5096 		   == 0))
5097 	{
5098 	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5099 	  extend_op = ZERO_EXTEND;
5100 	  m = GET_MODE (XEXP (t, 0));
5101 	}
5102       else if (GET_CODE (t) == ZERO_EXTEND
5103 	       && (GET_CODE (XEXP (t, 0)) == PLUS
5104 		   || GET_CODE (XEXP (t, 0)) == IOR
5105 		   || GET_CODE (XEXP (t, 0)) == XOR)
5106 	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5107 	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5108 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5109 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5110 	       && ((nonzero_bits (f, GET_MODE (f))
5111 		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
5112 		   == 0))
5113 	{
5114 	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5115 	  extend_op = ZERO_EXTEND;
5116 	  m = GET_MODE (XEXP (t, 0));
5117 	}
5118 
5119       if (z)
5120 	{
5121 	  temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
5122 						 cond_op0, cond_op1),
5123 			pc_rtx, pc_rtx, 0, 0);
5124 	  temp = simplify_gen_binary (MULT, m, temp,
5125 				      simplify_gen_binary (MULT, m, c1,
5126 							   const_true_rtx));
5127 	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
5128 	  temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
5129 
5130 	  if (extend_op != UNKNOWN)
5131 	    temp = simplify_gen_unary (extend_op, mode, temp, m);
5132 
5133 	  return temp;
5134 	}
5135     }
5136 
5137   /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5138      1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5139      negation of a single bit, we can convert this operation to a shift.  We
5140      can actually do this more generally, but it doesn't seem worth it.  */
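  /* E.g. if A is known to be 0 or 1 and C1 is 8,
     (if_then_else (ne A 0) (const_int 8) (const_int 0)) is simply
     (ashift A (const_int 3)).  */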
5141 
5142   if (true_code == NE && XEXP (cond, 1) == const0_rtx
5143       && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
5144       && ((1 == nonzero_bits (XEXP (cond, 0), mode)
5145 	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
5146 	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
5147 	       == GET_MODE_BITSIZE (mode))
5148 	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
5149     return
5150       simplify_shift_const (NULL_RTX, ASHIFT, mode,
5151 			    gen_lowpart (mode, XEXP (cond, 0)), i);
5152 
5153   /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG when nonzero_bits (REG) == 8.  */
5154   if (true_code == NE && XEXP (cond, 1) == const0_rtx
5155       && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
5156       && GET_MODE (XEXP (cond, 0)) == mode
5157       && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
5158 	  == nonzero_bits (XEXP (cond, 0), mode)
5159       && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
5160     return XEXP (cond, 0);
5161 
5162   return x;
5163 }
5164 
5165 /* Simplify X, a SET expression.  Return the new expression.  */
5166 
5167 static rtx
5168 simplify_set (rtx x)
5169 {
5170   rtx src = SET_SRC (x);
5171   rtx dest = SET_DEST (x);
5172   enum machine_mode mode
5173     = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
5174   rtx other_insn;
5175   rtx *cc_use;
5176 
5177   /* (set (pc) (return)) gets written as (return).  */
5178   if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
5179     return src;
5180 
5181   /* Now that we know for sure which bits of SRC we are using, see if we can
5182      simplify the expression for the object knowing that we only need the
5183      low-order bits.  */
5184 
5185   if (GET_MODE_CLASS (mode) == MODE_INT
5186       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5187     {
5188       src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
5189       SUBST (SET_SRC (x), src);
5190     }
5191 
5192   /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5193      the comparison result and try to simplify it unless we already have used
5194      undobuf.other_insn.  */
5195   if ((GET_MODE_CLASS (mode) == MODE_CC
5196        || GET_CODE (src) == COMPARE
5197        || CC0_P (dest))
5198       && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5199       && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
5200       && COMPARISON_P (*cc_use)
5201       && rtx_equal_p (XEXP (*cc_use, 0), dest))
5202     {
5203       enum rtx_code old_code = GET_CODE (*cc_use);
5204       enum rtx_code new_code;
5205       rtx op0, op1, tmp;
5206       int other_changed = 0;
5207       enum machine_mode compare_mode = GET_MODE (dest);
5208 
5209       if (GET_CODE (src) == COMPARE)
5210 	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5211       else
5212 	op0 = src, op1 = CONST0_RTX (GET_MODE (src));
5213 
5214       tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
5215 					   op0, op1);
5216       if (!tmp)
5217 	new_code = old_code;
5218       else if (!CONSTANT_P (tmp))
5219 	{
5220 	  new_code = GET_CODE (tmp);
5221 	  op0 = XEXP (tmp, 0);
5222 	  op1 = XEXP (tmp, 1);
5223 	}
5224       else
5225 	{
5226 	  rtx pat = PATTERN (other_insn);
5227 	  undobuf.other_insn = other_insn;
5228 	  SUBST (*cc_use, tmp);
5229 
5230 	  /* Attempt to simplify CC user.  */
5231 	  if (GET_CODE (pat) == SET)
5232 	    {
5233 	      rtx new = simplify_rtx (SET_SRC (pat));
5234 	      if (new != NULL_RTX)
5235 		SUBST (SET_SRC (pat), new);
5236 	    }
5237 
5238 	  /* Convert X into a no-op move.  */
5239 	  SUBST (SET_DEST (x), pc_rtx);
5240 	  SUBST (SET_SRC (x), pc_rtx);
5241 	  return x;
5242 	}
5243 
5244       /* Simplify our comparison, if possible.  */
5245       new_code = simplify_comparison (new_code, &op0, &op1);
5246 
5247 #ifdef SELECT_CC_MODE
5248       /* If this machine has CC modes other than CCmode, check to see if we
5249 	 need to use a different CC mode here.  */
5250       if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5251 	compare_mode = GET_MODE (op0);
5252       else
5253 	compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5254 
5255 #ifndef HAVE_cc0
5256       /* If the mode changed, we have to change SET_DEST, the mode in the
5257 	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
5258 	 a hard register, just build new versions with the proper mode.  If it
5259 	 is a pseudo, we lose unless it is only time we set the pseudo, in
5260 	 is a pseudo, we lose unless this is the only time we set the pseudo, in
5261       if (compare_mode != GET_MODE (dest))
5262 	{
5263 	  if (can_change_dest_mode (dest, 0, compare_mode))
5264 	    {
5265 	      unsigned int regno = REGNO (dest);
5266 	      rtx new_dest;
5267 
5268 	      if (regno < FIRST_PSEUDO_REGISTER)
5269 		new_dest = gen_rtx_REG (compare_mode, regno);
5270 	      else
5271 		{
5272 		  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
5273 		  new_dest = regno_reg_rtx[regno];
5274 		}
5275 
5276 	      SUBST (SET_DEST (x), new_dest);
5277 	      SUBST (XEXP (*cc_use, 0), new_dest);
5278 	      other_changed = 1;
5279 
5280 	      dest = new_dest;
5281 	    }
5282 	}
5283 #endif  /* cc0 */
5284 #endif  /* SELECT_CC_MODE */
5285 
5286       /* If the code changed, we have to build a new comparison in
5287 	 undobuf.other_insn.  */
5288       if (new_code != old_code)
5289 	{
5290 	  int other_changed_previously = other_changed;
5291 	  unsigned HOST_WIDE_INT mask;
5292 
5293 	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5294 					  dest, const0_rtx));
5295 	  other_changed = 1;
5296 
5297 	  /* If the only change we made was to change an EQ into an NE or
5298 	     vice versa, OP0 has only one bit that might be nonzero, and OP1
5299 	     is zero, check if changing the user of the condition code will
5300 	     produce a valid insn.  If it won't, we can keep the original code
5301 	     in that insn by surrounding our operation with an XOR.  */
5302 
5303 	  if (((old_code == NE && new_code == EQ)
5304 	       || (old_code == EQ && new_code == NE))
5305 	      && ! other_changed_previously && op1 == const0_rtx
5306 	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5307 	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5308 	    {
5309 	      rtx pat = PATTERN (other_insn), note = 0;
5310 
5311 	      if ((recog_for_combine (&pat, other_insn, &note) < 0
5312 		   && ! check_asm_operands (pat)))
5313 		{
5314 		  PUT_CODE (*cc_use, old_code);
5315 		  other_changed = 0;
5316 
5317 		  op0 = simplify_gen_binary (XOR, GET_MODE (op0),
5318 					     op0, GEN_INT (mask));
5319 		}
5320 	    }
5321 	}
5322 
5323       if (other_changed)
5324 	undobuf.other_insn = other_insn;
5325 
5326 #ifdef HAVE_cc0
5327       /* If we are now comparing against zero, change our source if
5328 	 needed.  If we do not use cc0, we always have a COMPARE.  */
5329       if (op1 == const0_rtx && dest == cc0_rtx)
5330 	{
5331 	  SUBST (SET_SRC (x), op0);
5332 	  src = op0;
5333 	}
5334       else
5335 #endif
5336 
5337       /* Otherwise, if we didn't previously have a COMPARE in the
5338 	 correct mode, we need one.  */
5339       if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5340 	{
5341 	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5342 	  src = SET_SRC (x);
5343 	}
5344       else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
5345 	{
5346 	  SUBST (SET_SRC (x), op0);
5347 	  src = SET_SRC (x);
5348 	}
5349       /* Otherwise, update the COMPARE if needed.  */
5350       else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
5351 	{
5352 	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5353 	  src = SET_SRC (x);
5354 	}
5355     }
5356   else
5357     {
5358       /* Get SET_SRC in a form where we have placed back any
5359 	 compound expressions.  Then do the checks below.  */
5360       src = make_compound_operation (src, SET);
5361       SUBST (SET_SRC (x), src);
5362     }
5363 
5364   /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5365      and X being a REG or (subreg (reg)), we may be able to convert this to
5366      (set (subreg:m2 x) (op)).
5367 
5368      We can always do this if M1 is narrower than M2 because that means that
5369      we only care about the low bits of the result.
5370 
5371      However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5372      perform a narrower operation than requested since the high-order bits will
5373      be undefined.  On machine where it is defined, this transformation is safe
5374      be undefined.  On machines where it is defined, this transformation is safe
5375 
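  /* As a sketch (assuming QImode and SImode each occupy one word):
     (set R:QI (subreg:QI (plus:SI A B) 0)) can become
     (set (subreg:SI R:QI 0) (plus:SI A B)), since only the low bits of
     the result are needed.  */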
5376   if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5377       && !OBJECT_P (SUBREG_REG (src))
5378       && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5379 	   / UNITS_PER_WORD)
5380 	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5381 	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5382 #ifndef WORD_REGISTER_OPERATIONS
5383       && (GET_MODE_SIZE (GET_MODE (src))
5384 	< GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5385 #endif
5386 #ifdef CANNOT_CHANGE_MODE_CLASS
5387       && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
5388 	    && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
5389 					 GET_MODE (SUBREG_REG (src)),
5390 					 GET_MODE (src)))
5391 #endif
5392       && (REG_P (dest)
5393 	  || (GET_CODE (dest) == SUBREG
5394 	      && REG_P (SUBREG_REG (dest)))))
5395     {
5396       SUBST (SET_DEST (x),
5397 	     gen_lowpart (GET_MODE (SUBREG_REG (src)),
5398 				      dest));
5399       SUBST (SET_SRC (x), SUBREG_REG (src));
5400 
5401       src = SET_SRC (x), dest = SET_DEST (x);
5402     }
5403 
5404 #ifdef HAVE_cc0
5405   /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5406      in SRC.  */
5407   if (dest == cc0_rtx
5408       && GET_CODE (src) == SUBREG
5409       && subreg_lowpart_p (src)
5410       && (GET_MODE_BITSIZE (GET_MODE (src))
5411 	  < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5412     {
5413       rtx inner = SUBREG_REG (src);
5414       enum machine_mode inner_mode = GET_MODE (inner);
5415 
5416       /* Here we make sure that we don't have a sign bit on.  */
5417       if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5418 	  && (nonzero_bits (inner, inner_mode)
5419 	      < ((unsigned HOST_WIDE_INT) 1
5420 		 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
5421 	{
5422 	  SUBST (SET_SRC (x), inner);
5423 	  src = SET_SRC (x);
5424 	}
5425     }
5426 #endif
5427 
5428 #ifdef LOAD_EXTEND_OP
5429   /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5430      would require a paradoxical subreg.  Replace the subreg with a
5431      zero_extend to avoid the reload that would otherwise be required.  */
5432 
5433   if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5434       && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
5435       && SUBREG_BYTE (src) == 0
5436       && (GET_MODE_SIZE (GET_MODE (src))
5437 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5438       && MEM_P (SUBREG_REG (src)))
5439     {
5440       SUBST (SET_SRC (x),
5441 	     gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5442 			    GET_MODE (src), SUBREG_REG (src)));
5443 
5444       src = SET_SRC (x);
5445     }
5446 #endif
5447 
5448   /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5449      are comparing an item known to be 0 or -1 against 0, use a logical
5450      operation instead. Check for one of the arms being an IOR of the other
5451      arm with some value.  We compute three terms to be IOR'ed together.  In
5452      practice, at most two will be nonzero.  Then we do the IOR's.  */
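  /* For example, if A is known to be 0 or -1,
     (if_then_else (ne A 0) X Y) is (ior (and A X) (and (not A) Y)):
     A == -1 selects X and A == 0 selects Y.  */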
5453 
5454   if (GET_CODE (dest) != PC
5455       && GET_CODE (src) == IF_THEN_ELSE
5456       && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5457       && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5458       && XEXP (XEXP (src, 0), 1) == const0_rtx
5459       && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5460 #ifdef HAVE_conditional_move
5461       && ! can_conditionally_move_p (GET_MODE (src))
5462 #endif
5463       && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5464 			       GET_MODE (XEXP (XEXP (src, 0), 0)))
5465 	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5466       && ! side_effects_p (src))
5467     {
5468       rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5469 		      ? XEXP (src, 1) : XEXP (src, 2));
5470       rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5471 		   ? XEXP (src, 2) : XEXP (src, 1));
5472       rtx term1 = const0_rtx, term2, term3;
5473 
5474       if (GET_CODE (true_rtx) == IOR
5475 	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5476 	term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
5477       else if (GET_CODE (true_rtx) == IOR
5478 	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5479 	term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
5480       else if (GET_CODE (false_rtx) == IOR
5481 	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5482 	term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
5483       else if (GET_CODE (false_rtx) == IOR
5484 	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5485 	term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
5486 
5487       term2 = simplify_gen_binary (AND, GET_MODE (src),
5488 				   XEXP (XEXP (src, 0), 0), true_rtx);
5489       term3 = simplify_gen_binary (AND, GET_MODE (src),
5490 				   simplify_gen_unary (NOT, GET_MODE (src),
5491 						       XEXP (XEXP (src, 0), 0),
5492 						       GET_MODE (src)),
5493 				   false_rtx);
5494 
5495       SUBST (SET_SRC (x),
5496 	     simplify_gen_binary (IOR, GET_MODE (src),
5497 				  simplify_gen_binary (IOR, GET_MODE (src),
5498 						       term1, term2),
5499 				  term3));
5500 
5501       src = SET_SRC (x);
5502     }
5503 
5504   /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5505      whole thing fail.  */
5506   if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5507     return src;
5508   else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5509     return dest;
5510   else
5511     /* Convert this into a field assignment operation, if possible.  */
5512     return make_field_assignment (x);
5513 }
5514 
5515 /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5516    result.  */
5517 
5518 static rtx
5519 simplify_logical (rtx x)
5520 {
5521   enum machine_mode mode = GET_MODE (x);
5522   rtx op0 = XEXP (x, 0);
5523   rtx op1 = XEXP (x, 1);
5524 
5525   switch (GET_CODE (x))
5526     {
5527     case AND:
5528       /* We can call simplify_and_const_int only if we don't lose
5529 	 any (sign) bits when converting INTVAL (op1) to
5530 	 "unsigned HOST_WIDE_INT".  */
5531       if (GET_CODE (op1) == CONST_INT
5532 	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5533 	      || INTVAL (op1) > 0))
5534 	{
5535 	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5536 	  if (GET_CODE (x) != AND)
5537 	    return x;
5538 
5539 	  op0 = XEXP (x, 0);
5540 	  op1 = XEXP (x, 1);
5541 	}
5542 
5543       /* If we have any of (and (ior A B) C) or (and (xor A B) C),
5544 	 apply the distributive law and then the inverse distributive
5545 	 law to see if things simplify.  */
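      /* E.g. (and (ior A B) C) becomes (ior (and A C) (and B C)); if one
	 of the inner ANDs simplifies, say to zero, the whole expression
	 collapses.  */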
5546       if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5547 	{
5548 	  rtx result = distribute_and_simplify_rtx (x, 0);
5549 	  if (result)
5550 	    return result;
5551 	}
5552       if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5553 	{
5554 	  rtx result = distribute_and_simplify_rtx (x, 1);
5555 	  if (result)
5556 	    return result;
5557 	}
5558       break;
5559 
5560     case IOR:
5561       /* If we have (ior (and A B) C), apply the distributive law and then
5562 	 the inverse distributive law to see if things simplify.  */
5563 
5564       if (GET_CODE (op0) == AND)
5565 	{
5566 	  rtx result = distribute_and_simplify_rtx (x, 0);
5567 	  if (result)
5568 	    return result;
5569 	}
5570 
5571       if (GET_CODE (op1) == AND)
5572 	{
5573 	  rtx result = distribute_and_simplify_rtx (x, 1);
5574 	  if (result)
5575 	    return result;
5576 	}
5577       break;
5578 
5579     default:
5580       gcc_unreachable ();
5581     }
5582 
5583   return x;
5584 }
5585 
5586 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5587    operations" because they can be replaced with two more basic operations.
5588    ZERO_EXTEND is also considered "compound" because it can be replaced with
5589    an AND operation, which is simpler, though only one operation.
5590 
5591    The function expand_compound_operation is called with an rtx expression
5592    and will convert it to the appropriate shifts and AND operations,
5593    simplifying at each stage.
5594 
5595    The function make_compound_operation is called to convert an expression
5596    consisting of shifts and ANDs into the equivalent compound expression.
5597    It is the inverse of this function, loosely speaking.  */
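/* For example, (zero_extend:SI (reg:QI R)) is equivalent to
   (and:SI (subreg:SI (reg:QI R) 0) (const_int 255)), and
   (sign_extract:SI X (const_int 8) (const_int 0)) to a left shift by 24
   followed by an arithmetic right shift by 24 (with the usual
   BITS_BIG_ENDIAN caveat about bit numbering).  */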
5598 
5599 static rtx
5600 expand_compound_operation (rtx x)
5601 {
5602   unsigned HOST_WIDE_INT pos = 0, len;
5603   int unsignedp = 0;
5604   unsigned int modewidth;
5605   rtx tem;
5606 
5607   switch (GET_CODE (x))
5608     {
5609     case ZERO_EXTEND:
5610       unsignedp = 1;
5611     case SIGN_EXTEND:
5612       /* We can't necessarily use a const_int for a multiword mode;
5613 	 it depends on implicitly extending the value.
5614 	 Since we don't know the right way to extend it,
5615 	 we can't tell whether the implicit way is right.
5616 
5617 	 Even for a mode that is no wider than a const_int,
5618 	 we can't win, because we need to sign extend one of its bits through
5619 	 the rest of it, and we don't know which bit.  */
5620       if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5621 	return x;
5622 
5623       /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5624 	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
5625 	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5626 	 reloaded. If not for that, MEM's would very rarely be safe.
5627 	 reloaded.  If not for that, MEMs would very rarely be safe.
5628 	 Reject MODEs bigger than a word, because we might not be able
5629 	 to reference a two-register group starting with an arbitrary register
5630 	 (and currently gen_lowpart might crash for a SUBREG).  */
5631 
5632       if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5633 	return x;
5634 
5635       /* Reject MODEs that aren't scalar integers because turning vector
5636 	 or complex modes into shifts causes problems.  */
5637 
5638       if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5639 	return x;
5640 
5641       len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5642       /* If the inner object has VOIDmode (the only way this can happen
5643 	 is if it is an ASM_OPERANDS), we can't do anything since we don't
5644 	 know how much masking to do.  */
5645       if (len == 0)
5646 	return x;
5647 
5648       break;
5649 
5650     case ZERO_EXTRACT:
5651       unsignedp = 1;
5652 
5653       /* ... fall through ...  */
5654 
5655     case SIGN_EXTRACT:
5656       /* If the operand is a CLOBBER, just return it.  */
5657       if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5658 	return XEXP (x, 0);
5659 
5660       if (GET_CODE (XEXP (x, 1)) != CONST_INT
5661 	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5662 	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5663 	return x;
5664 
5665       /* Reject MODEs that aren't scalar integers because turning vector
5666 	 or complex modes into shifts causes problems.  */
5667 
5668       if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5669 	return x;
5670 
5671       len = INTVAL (XEXP (x, 1));
5672       pos = INTVAL (XEXP (x, 2));
5673 
5674       /* The extraction must stay within the object being extracted; fail otherwise.  */
5675       if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5676 	return x;
5677 
5678       if (BITS_BIG_ENDIAN)
5679 	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5680 
5681       break;
5682 
5683     default:
5684       return x;
5685     }
5686   /* Convert sign extension to zero extension, if we know that the high
5687      bit is not set, as this is easier to optimize.  It will be converted
5688      back to cheaper alternative in make_extraction.  */
5689   if (GET_CODE (x) == SIGN_EXTEND
5690       && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5691 	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5692 		& ~(((unsigned HOST_WIDE_INT)
5693 		      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5694 		     >> 1))
5695 	       == 0)))
5696     {
5697       rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5698       rtx temp2 = expand_compound_operation (temp);
5699 
5700       /* Make sure this is a profitable operation.  */
5701       if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
5702        return temp2;
5703       else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
5704        return temp;
5705       else
5706        return x;
5707     }
5708 
5709   /* We can optimize some special cases of ZERO_EXTEND.  */
5710   if (GET_CODE (x) == ZERO_EXTEND)
5711     {
5712       /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5713 	 know that the last value didn't have any inappropriate bits
5714 	 set.  */
5715       if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5716 	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5717 	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5718 	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5719 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5720 	return XEXP (XEXP (x, 0), 0);
5721 
5722       /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5723       if (GET_CODE (XEXP (x, 0)) == SUBREG
5724 	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5725 	  && subreg_lowpart_p (XEXP (x, 0))
5726 	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5727 	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5728 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5729 	return SUBREG_REG (XEXP (x, 0));
5730 
5731       /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5732 	 is a comparison and STORE_FLAG_VALUE permits.  This is like
5733 	 the first case, but it works even when GET_MODE (x) is larger
5734 	 than HOST_WIDE_INT.  */
5735       if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5736 	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5737 	  && COMPARISON_P (XEXP (XEXP (x, 0), 0))
5738 	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5739 	      <= HOST_BITS_PER_WIDE_INT)
5740 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5741 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5742 	return XEXP (XEXP (x, 0), 0);
5743 
5744       /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5745       if (GET_CODE (XEXP (x, 0)) == SUBREG
5746 	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5747 	  && subreg_lowpart_p (XEXP (x, 0))
5748 	  && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
5749 	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5750 	      <= HOST_BITS_PER_WIDE_INT)
5751 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5752 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5753 	return SUBREG_REG (XEXP (x, 0));
5754 
5755     }
5756 
5757   /* If we reach here, we want to return a pair of shifts.  The inner
5758      shift is a left shift of BITSIZE - POS - LEN bits.  The outer
5759      shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
5760      logical depending on the value of UNSIGNEDP.
5761 
5762      If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5763      converted into an AND of a shift.
5764 
5765      We must check for the case where the left shift would have a negative
5766      count.  This can happen in a case like (x >> 31) & 255 on machines
5767      that can't shift by a constant.  On those machines, we would first
5768      combine the shift with the AND to produce a variable-position
5769      extraction.  Then the constant of 31 would be substituted in to produce
5770      such a position.  */
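  /* For example, (sign_extract:SI X 8 8), i.e. LEN 8 at POS 8 in 32-bit
     SImode, becomes (ashiftrt (ashift X 16) 24), assuming little-endian
     bit numbering.  */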
5771 
5772   modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5773   if (modewidth + len >= pos)
5774     {
5775       enum machine_mode mode = GET_MODE (x);
5776       tem = gen_lowpart (mode, XEXP (x, 0));
5777       if (!tem || GET_CODE (tem) == CLOBBER)
5778 	return x;
5779       tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
5780 				  tem, modewidth - pos - len);
5781       tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5782 				  mode, tem, modewidth - len);
5783     }
5784   else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5785     tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5786 				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5787 							GET_MODE (x),
5788 							XEXP (x, 0), pos),
5789 				  ((HOST_WIDE_INT) 1 << len) - 1);
5790   else
5791     /* Any other cases we can't handle.  */
5792     return x;
5793 
5794   /* If we couldn't do this for some reason, return the original
5795      expression.  */
5796   if (GET_CODE (tem) == CLOBBER)
5797     return x;
5798 
5799   return tem;
5800 }
5801 
5802 /* X is a SET which contains an assignment of one object into
5803    a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5804    or certain SUBREGS). If possible, convert it into a series of
5805    logical operations.
5806 
5807    We half-heartedly support variable positions, but do not at all
5808    support variable lengths.  */
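/* Roughly, for a constant-position case such as
   (set (zero_extract:SI X (const_int 8) (const_int 8)) Y), the result is
   X = (X & ~(255 << 8)) | ((Y & 255) << 8), expressed with AND, NOT,
   ASHIFT and IOR as built below.  */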
5809 
5810 static rtx
5811 expand_field_assignment (rtx x)
5812 {
5813   rtx inner;
5814   rtx pos;			/* Always counts from low bit.  */
5815   int len;
5816   rtx mask, cleared, masked;
5817   enum machine_mode compute_mode;
5818 
5819   /* Loop until we find something we can't simplify.  */
5820   while (1)
5821     {
5822       if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5823 	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5824 	{
5825 	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5826 	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5827 	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
5828 	}
5829       else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5830 	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5831 	{
5832 	  inner = XEXP (SET_DEST (x), 0);
5833 	  len = INTVAL (XEXP (SET_DEST (x), 1));
5834 	  pos = XEXP (SET_DEST (x), 2);
5835 
5836 	  /* A constant position should stay within the width of INNER.  */
5837 	  if (GET_CODE (pos) == CONST_INT
5838 	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5839 	    break;
5840 
5841 	  if (BITS_BIG_ENDIAN)
5842 	    {
5843 	      if (GET_CODE (pos) == CONST_INT)
5844 		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5845 			       - INTVAL (pos));
5846 	      else if (GET_CODE (pos) == MINUS
5847 		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5848 		       && (INTVAL (XEXP (pos, 1))
5849 			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5850 		/* If position is ADJUST - X, new position is X.  */
5851 		pos = XEXP (pos, 0);
5852 	      else
5853 		pos = simplify_gen_binary (MINUS, GET_MODE (pos),
5854 					   GEN_INT (GET_MODE_BITSIZE (
5855 						    GET_MODE (inner))
5856 						    - len),
5857 					   pos);
5858 	    }
5859 	}
5860 
5861       /* A SUBREG between two modes that occupy the same number of words
5862 	 can be done by moving the SUBREG to the source.  */
5863       else if (GET_CODE (SET_DEST (x)) == SUBREG
5864 	       /* We need SUBREGs to compute nonzero_bits properly.  */
5865 	       && nonzero_sign_valid
5866 	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5867 		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5868 		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5869 			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5870 	{
5871 	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5872 			   gen_lowpart
5873 			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
5874 			    SET_SRC (x)));
5875 	  continue;
5876 	}
5877       else
5878 	break;
5879 
5880       while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5881 	inner = SUBREG_REG (inner);
5882 
5883       compute_mode = GET_MODE (inner);
5884 
5885       /* Don't attempt bitwise arithmetic on non scalar integer modes.  */
5886       if (! SCALAR_INT_MODE_P (compute_mode))
5887 	{
5888 	  enum machine_mode imode;
5889 
5890 	  /* Don't do anything for vector or complex integral types.  */
5891 	  if (! FLOAT_MODE_P (compute_mode))
5892 	    break;
5893 
5894 	  /* Try to find an integral mode to pun with.  */
5895 	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5896 	  if (imode == BLKmode)
5897 	    break;
5898 
5899 	  compute_mode = imode;
5900 	  inner = gen_lowpart (imode, inner);
5901 	}
5902 
5903       /* Compute a mask of LEN bits, if we can do this on the host machine.  */
5904       if (len >= HOST_BITS_PER_WIDE_INT)
5905 	break;
5906 
5907       /* Now compute the equivalent expression.  Make a copy of INNER
5908 	 for the SET_DEST in case it is a MEM into which we will substitute;
5909 	 we don't want shared RTL in that case.  */
5910       mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5911       cleared = simplify_gen_binary (AND, compute_mode,
5912 				     simplify_gen_unary (NOT, compute_mode,
5913 				       simplify_gen_binary (ASHIFT,
5914 							    compute_mode,
5915 							    mask, pos),
5916 				       compute_mode),
5917 				     inner);
5918       masked = simplify_gen_binary (ASHIFT, compute_mode,
5919 				    simplify_gen_binary (
5920 				      AND, compute_mode,
5921 				      gen_lowpart (compute_mode, SET_SRC (x)),
5922 				      mask),
5923 				    pos);
5924 
5925       x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
5926 		       simplify_gen_binary (IOR, compute_mode,
5927 					    cleared, masked));
5928     }
5929 
5930   return x;
5931 }
5932 
5933 /* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
5934    it is an RTX that represents a variable starting position; otherwise,
5935    POS is the (constant) starting bit position (counted from the LSB).
5936 
5937    UNSIGNEDP is nonzero for an unsigned reference and zero for a
5938    signed reference.
5939 
5940    IN_DEST is nonzero if this is a reference in the destination of a
5941    SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
5942    a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5943    be used.
5944 
5945    IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
5946    ZERO_EXTRACT should be built even for bits starting at bit 0.
5947 
5948    MODE is the desired mode of the result (if IN_DEST == 0).
5949 
5950    The result is an RTX for the extraction or NULL_RTX if the target
5951    can't handle it.  */
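/* For illustration only (register and modes assumed): on a 32-bit
   little-endian target, asking for LEN == 8 unsigned bits at POS == 16 of
   (reg:SI r) with IN_DEST == 0 typically yields

     (zero_extract:SI (reg:SI r) (const_int 8) (const_int 16))

   whereas the same request at POS == 0 returns 0, leaving the caller's
   simple AND alone (see below).  */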
5952 
5953 static rtx
5954 make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
5955 		 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
5956 		 int in_dest, int in_compare)
5957 {
5958   /* This mode describes the size of the storage area
5959      to fetch the overall value from.  Within that, we
5960      ignore the POS lowest bits, etc.  */
5961   enum machine_mode is_mode = GET_MODE (inner);
5962   enum machine_mode inner_mode;
5963   enum machine_mode wanted_inner_mode;
5964   enum machine_mode wanted_inner_reg_mode = word_mode;
5965   enum machine_mode pos_mode = word_mode;
5966   enum machine_mode extraction_mode = word_mode;
5967   enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5968   rtx new = 0;
5969   rtx orig_pos_rtx = pos_rtx;
5970   HOST_WIDE_INT orig_pos;
5971 
5972   if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5973     {
5974       /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5975 	 consider just the QI as the memory to extract from.
5976 	 The subreg adds or removes high bits; its mode is
5977 	 irrelevant to the meaning of this extraction,
5978 	 since POS and LEN count from the lsb.  */
5979       if (MEM_P (SUBREG_REG (inner)))
5980 	is_mode = GET_MODE (SUBREG_REG (inner));
5981       inner = SUBREG_REG (inner);
5982     }
5983   else if (GET_CODE (inner) == ASHIFT
5984 	   && GET_CODE (XEXP (inner, 1)) == CONST_INT
5985 	   && pos_rtx == 0 && pos == 0
5986 	   && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
5987     {
5988       /* We're extracting the least significant bits of an rtx
5989 	 (ashift X (const_int C)), where LEN > C.  Extract the
5990 	 least significant (LEN - C) bits of X, giving an rtx
5991 	 whose mode is MODE, then shift it left C times.  */
5992       new = make_extraction (mode, XEXP (inner, 0),
5993 			     0, 0, len - INTVAL (XEXP (inner, 1)),
5994 			     unsignedp, in_dest, in_compare);
5995       if (new != 0)
5996 	return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
5997     }
5998 
5999   inner_mode = GET_MODE (inner);
6000 
6001   if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6002     pos = INTVAL (pos_rtx), pos_rtx = 0;
6003 
6004   /* See if this can be done without an extraction.  We never can if the
6005      width of the field is not the same as that of some integer mode. For
6006      registers, we can only avoid the extraction if the position is at the
6007      low-order bit and this is either not in the destination or we have the
6008      appropriate STRICT_LOW_PART operation available.
6009 
6010      For MEM, we can avoid an extract if the field starts on an appropriate
6011      boundary and we can change the mode of the memory reference.  */
6012 
6013   if (tmode != BLKmode
6014       && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6015 	   && !MEM_P (inner)
6016 	   && (inner_mode == tmode
6017 	       || !REG_P (inner)
6018 	       || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
6019 					 GET_MODE_BITSIZE (inner_mode))
6020 	       || reg_truncated_to_mode (tmode, inner))
6021 	   && (! in_dest
6022 	       || (REG_P (inner)
6023 		   && have_insn_for (STRICT_LOW_PART, tmode))))
6024 	  || (MEM_P (inner) && pos_rtx == 0
6025 	      && (pos
6026 		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6027 		     : BITS_PER_UNIT)) == 0
6028 	      /* We can't do this if we are widening INNER_MODE (it
6029 		 may not be aligned, for one thing).  */
6030 	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6031 	      && (inner_mode == tmode
6032 		  || (! mode_dependent_address_p (XEXP (inner, 0))
6033 		      && ! MEM_VOLATILE_P (inner))))))
6034     {
6035       /* If INNER is a MEM, make a new MEM that encompasses just the desired
6036 	 field.  If the original and current mode are the same, we need not
6037 	 adjust the offset.  Otherwise, we do if bytes big endian.
6038 
6039 	 If INNER is not a MEM, get a piece consisting of just the field
6040 	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
6041 
6042       if (MEM_P (inner))
6043 	{
6044 	  HOST_WIDE_INT offset;
6045 
6046 	  /* POS counts from lsb, but make OFFSET count in memory order.  */
6047 	  if (BYTES_BIG_ENDIAN)
6048 	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6049 	  else
6050 	    offset = pos / BITS_PER_UNIT;
6051 
6052 	  new = adjust_address_nv (inner, tmode, offset);
6053 	}
6054       else if (REG_P (inner))
6055 	{
6056 	  if (tmode != inner_mode)
6057 	    {
6058 	      /* We can't call gen_lowpart in a DEST since we
6059 		 always want a SUBREG (see below) and it would sometimes
6060 		 return a new hard register.  */
6061 	      if (pos || in_dest)
6062 		{
6063 		  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6064 
6065 		  if (WORDS_BIG_ENDIAN
6066 		      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6067 		    final_word = ((GET_MODE_SIZE (inner_mode)
6068 				   - GET_MODE_SIZE (tmode))
6069 				  / UNITS_PER_WORD) - final_word;
6070 
6071 		  final_word *= UNITS_PER_WORD;
6072 		  if (BYTES_BIG_ENDIAN &&
6073 		      GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6074 		    final_word += (GET_MODE_SIZE (inner_mode)
6075 				   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6076 
6077 		  /* Avoid creating invalid subregs, for example when
6078 		     simplifying (x>>32)&255.  */
6079 		  if (!validate_subreg (tmode, inner_mode, inner, final_word))
6080 		    return NULL_RTX;
6081 
6082 		  new = gen_rtx_SUBREG (tmode, inner, final_word);
6083 		}
6084 	      else
6085 		new = gen_lowpart (tmode, inner);
6086 	    }
6087 	  else
6088 	    new = inner;
6089 	}
6090       else
6091 	new = force_to_mode (inner, tmode,
6092 			     len >= HOST_BITS_PER_WIDE_INT
6093 			     ? ~(unsigned HOST_WIDE_INT) 0
6094 			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6095 			     0);
6096 
6097       /* If this extraction is going into the destination of a SET,
6098 	 make a STRICT_LOW_PART unless we made a MEM.  */
6099 
6100       if (in_dest)
6101 	return (MEM_P (new) ? new
6102 		: (GET_CODE (new) != SUBREG
6103 		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
6104 		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6105 
6106       if (mode == tmode)
6107 	return new;
6108 
6109       if (GET_CODE (new) == CONST_INT)
6110 	return gen_int_mode (INTVAL (new), mode);
6111 
6112       /* If we know that no extraneous bits are set, and that the high
6113 	 bit is not set, convert the extraction to the cheaper of
6114 	 sign and zero extension, which are equivalent in these cases.  */
6115       if (flag_expensive_optimizations
6116 	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6117 	      && ((nonzero_bits (new, tmode)
6118 		   & ~(((unsigned HOST_WIDE_INT)
6119 			GET_MODE_MASK (tmode))
6120 		       >> 1))
6121 		  == 0)))
6122 	{
6123 	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6124 	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6125 
6126 	  /* Prefer ZERO_EXTENSION, since it gives more information to
6127 	     backends.  */
6128 	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6129 	    return temp;
6130 	  return temp1;
6131 	}
6132 
6133       /* Otherwise, sign- or zero-extend unless we already are in the
6134 	 proper mode.  */
6135 
6136       return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6137 			     mode, new));
6138     }
6139 
6140   /* Unless this is a COMPARE or we have a funny memory reference,
6141      don't do anything with zero-extending field extracts starting at
6142      the low-order bit since they are simple AND operations.  */
6143   if (pos_rtx == 0 && pos == 0 && ! in_dest
6144       && ! in_compare && unsignedp)
6145     return 0;
6146 
6147   /* If INNER is a MEM, reject this if we would be spanning bytes or
6148      if the position is not a constant and the length is not 1.  In all
6149      other cases, we would only be going outside our object in cases when
6150      an original shift would have been undefined.  */
6151   if (MEM_P (inner)
6152       && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6153 	  || (pos_rtx != 0 && len != 1)))
6154     return 0;
6155 
6156   /* Get the mode to use should INNER not be a MEM, the mode for the position,
6157      and the mode for the result.  */
6158   if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6159     {
6160       wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6161       pos_mode = mode_for_extraction (EP_insv, 2);
6162       extraction_mode = mode_for_extraction (EP_insv, 3);
6163     }
6164 
6165   if (! in_dest && unsignedp
6166       && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6167     {
6168       wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6169       pos_mode = mode_for_extraction (EP_extzv, 3);
6170       extraction_mode = mode_for_extraction (EP_extzv, 0);
6171     }
6172 
6173   if (! in_dest && ! unsignedp
6174       && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6175     {
6176       wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6177       pos_mode = mode_for_extraction (EP_extv, 3);
6178       extraction_mode = mode_for_extraction (EP_extv, 0);
6179     }
6180 
6181   /* Never narrow an object, since that might not be safe.  */
6182 
6183   if (mode != VOIDmode
6184       && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6185     extraction_mode = mode;
6186 
6187   if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6188       && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6189     pos_mode = GET_MODE (pos_rtx);
6190 
6191   /* If this is not from memory, the desired mode is the preferred mode
6192      for an extraction pattern's first input operand, or word_mode if there
6193      is none.  */
6194   if (!MEM_P (inner))
6195     wanted_inner_mode = wanted_inner_reg_mode;
6196   else
6197     {
6198       /* Be careful not to go beyond the extracted object and maintain the
6199 	 natural alignment of the memory.  */
6200       wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
6201       while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
6202 	     > GET_MODE_BITSIZE (wanted_inner_mode))
6203 	{
6204 	  wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
6205 	  gcc_assert (wanted_inner_mode != VOIDmode);
6206 	}
6207 
6208       /* If we have to change the mode of memory and cannot, the desired mode
6209 	 is EXTRACTION_MODE.  */
6210       if (inner_mode != wanted_inner_mode
6211 	  && (mode_dependent_address_p (XEXP (inner, 0))
6212 	      || MEM_VOLATILE_P (inner)
6213 	      || pos_rtx))
6214 	wanted_inner_mode = extraction_mode;
6215     }
6216 
6217   orig_pos = pos;
6218 
6219   if (BITS_BIG_ENDIAN)
6220     {
6221       /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6222 	 BITS_BIG_ENDIAN style.  If position is constant, compute new
6223 	 position.  Otherwise, build subtraction.
6224 	 Note that POS is relative to the mode of the original argument.
6225 	 If it's a MEM we need to recompute POS relative to that.
6226 	 However, if we're extracting from (or inserting into) a register,
6227 	 we want to recompute POS relative to wanted_inner_mode.  */
6228       int width = (MEM_P (inner)
6229 		   ? GET_MODE_BITSIZE (is_mode)
6230 		   : GET_MODE_BITSIZE (wanted_inner_mode));
6231 
6232       if (pos_rtx == 0)
6233 	pos = width - len - pos;
6234       else
6235 	pos_rtx
6236 	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6237       /* POS may be less than 0 now, but we check for that below.
6238 	 Note that it can only be less than 0 if !MEM_P (inner).  */
6239     }
6240 
6241   /* If INNER has a wider mode, and this is a constant extraction, try to
6242      make it smaller and adjust the byte to point to the byte containing
6243      the value.  */
6244   if (wanted_inner_mode != VOIDmode
6245       && inner_mode != wanted_inner_mode
6246       && ! pos_rtx
6247       && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6248       && MEM_P (inner)
6249       && ! mode_dependent_address_p (XEXP (inner, 0))
6250       && ! MEM_VOLATILE_P (inner))
6251     {
6252       int offset = 0;
6253 
6254       /* The computations below will be correct if the machine is big
6255 	 endian in both bits and bytes or little endian in bits and bytes.
6256 	 If it is mixed, we must adjust.  */
6257 
6258       /* If bytes are big endian and we had a paradoxical SUBREG, we must
6259 	 adjust OFFSET to compensate.  */
6260       if (BYTES_BIG_ENDIAN
6261 	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6262 	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6263 
6264       /* We can now move to the desired byte.  */
6265       offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
6266 		* GET_MODE_SIZE (wanted_inner_mode);
6267       pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6268 
6269       if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6270 	  && is_mode != wanted_inner_mode)
6271 	offset = (GET_MODE_SIZE (is_mode)
6272 		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
6273 
6274       inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6275     }
6276 
6277   /* If INNER is not memory, we can always get it into the proper mode.  If we
6278      are changing its mode, POS must be a constant and smaller than the size
6279      of the new mode.  */
6280   else if (!MEM_P (inner))
6281     {
6282       if (GET_MODE (inner) != wanted_inner_mode
6283 	  && (pos_rtx != 0
6284 	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6285 	return 0;
6286 
6287       if (orig_pos < 0)
6288 	return 0;
6289 
6290       inner = force_to_mode (inner, wanted_inner_mode,
6291 			     pos_rtx
6292 			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6293 			     ? ~(unsigned HOST_WIDE_INT) 0
6294 			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6295 				<< orig_pos),
6296 			     0);
6297     }
6298 
6299   /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
6300      have to zero extend.  Otherwise, we can just use a SUBREG.  */
6301   if (pos_rtx != 0
6302       && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6303     {
6304       rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6305 
6306       /* If we know that no extraneous bits are set, and that the high
6307 	 bit is not set, convert the extraction to the cheaper of
6308 	 sign and zero extension, which are equivalent in these
6309 	 cases.  */
6310       if (flag_expensive_optimizations
6311 	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6312 	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6313 		   & ~(((unsigned HOST_WIDE_INT)
6314 			GET_MODE_MASK (GET_MODE (pos_rtx)))
6315 		       >> 1))
6316 		  == 0)))
6317 	{
6318 	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6319 
6320 	  /* Prefer ZERO_EXTENSION, since it gives more information to
6321 	     backends.  */
6322 	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6323 	    temp = temp1;
6324 	}
6325       pos_rtx = temp;
6326     }
6327   else if (pos_rtx != 0
6328 	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6329     pos_rtx = gen_lowpart (pos_mode, pos_rtx);
6330 
6331   /* Make POS_RTX unless we already have it and it is correct.  If we don't
6332      have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6333      be a CONST_INT.  */
6334   if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6335     pos_rtx = orig_pos_rtx;
6336 
6337   else if (pos_rtx == 0)
6338     pos_rtx = GEN_INT (pos);
6339 
6340   /* Make the required operation.  See if we can use existing rtx.  */
6341   new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6342 			 extraction_mode, inner, GEN_INT (len), pos_rtx);
6343   if (! in_dest)
6344     new = gen_lowpart (mode, new);
6345 
6346   return new;
6347 }
6348 
6349 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6350    with any other operations in X.  Return X without that shift if so.  */
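/* A small assumed example: with COUNT == 2,

     (plus:SI (ashift:SI (reg:SI r) (const_int 2)) (const_int 12))

   can be returned as (plus:SI (reg:SI r) (const_int 3)), since shifting
   that result left by 2 again reproduces the original value.  */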
6351 
6352 static rtx
6353 extract_left_shift (rtx x, int count)
6354 {
6355   enum rtx_code code = GET_CODE (x);
6356   enum machine_mode mode = GET_MODE (x);
6357   rtx tem;
6358 
6359   switch (code)
6360     {
6361     case ASHIFT:
6362       /* This is the shift itself.  If it is wide enough, we will return
6363 	 either the value being shifted if the shift count is equal to
6364 	 COUNT or a shift for the difference.  */
6365       if (GET_CODE (XEXP (x, 1)) == CONST_INT
6366 	  && INTVAL (XEXP (x, 1)) >= count)
6367 	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6368 				     INTVAL (XEXP (x, 1)) - count);
6369       break;
6370 
6371     case NEG:  case NOT:
6372       if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6373 	return simplify_gen_unary (code, mode, tem, mode);
6374 
6375       break;
6376 
6377     case PLUS:  case IOR:  case XOR:  case AND:
6378       /* If we can safely shift this constant and we find the inner shift,
6379 	 make a new operation.  */
6380       if (GET_CODE (XEXP (x, 1)) == CONST_INT
6381 	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6382 	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6383 	return simplify_gen_binary (code, mode, tem,
6384 				    GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6385 
6386       break;
6387 
6388     default:
6389       break;
6390     }
6391 
6392   return 0;
6393 }
6394 
6395 /* Look at the expression rooted at X.  Look for expressions
6396    equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6397    Form these expressions.
6398 
6399    Return the new rtx, usually just X.
6400 
6401    Also, for machines like the VAX that don't have logical shift insns,
6402    try to convert logical to arithmetic shift operations in cases where
6403    they are equivalent.  This undoes the canonicalizations to logical
6404    shifts done elsewhere.
6405 
6406    We try, as much as possible, to re-use rtl expressions to save memory.
6407 
6408    IN_CODE says what kind of expression we are processing.  Normally, it is
6409    SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
6410    being kludges), it is MEM.  When processing the arguments of a comparison
6411    or a COMPARE against zero, it is COMPARE.  */
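/* A representative (assumed) example with IN_CODE == SET:

     (and:SI (lshiftrt:SI (reg:SI r) (const_int 3)) (const_int 255))

   is turned into

     (zero_extract:SI (reg:SI r) (const_int 8) (const_int 3))

   because 255 is one less than a power of two and the shift supplies the
   starting bit position.  */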
6412 
6413 static rtx
6414 make_compound_operation (rtx x, enum rtx_code in_code)
6415 {
6416   enum rtx_code code = GET_CODE (x);
6417   enum machine_mode mode = GET_MODE (x);
6418   int mode_width = GET_MODE_BITSIZE (mode);
6419   rtx rhs, lhs;
6420   enum rtx_code next_code;
6421   int i;
6422   rtx new = 0;
6423   rtx tem;
6424   const char *fmt;
6425 
6426   /* Select the code to be used in recursive calls.  Once we are inside an
6427      address, we stay there.  If we have a comparison, set to COMPARE,
6428      but once inside, go back to our default of SET.  */
6429 
6430   next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6431 	       : ((code == COMPARE || COMPARISON_P (x))
6432 		  && XEXP (x, 1) == const0_rtx) ? COMPARE
6433 	       : in_code == COMPARE ? SET : in_code);
6434 
6435   /* Process depending on the code of this operation.  If NEW is set
6436      nonzero, it will be returned.  */
6437 
6438   switch (code)
6439     {
6440     case ASHIFT:
6441       /* Convert shifts by constants into multiplications if inside
6442 	 an address.  */
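      /* For instance, (ashift:SI (reg:SI r) (const_int 2)) appearing inside
	 a MEM becomes (mult:SI (reg:SI r) (const_int 4)); addresses are
	 canonically written with MULT rather than ASHIFT.  */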
6443       if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6444 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6445 	  && INTVAL (XEXP (x, 1)) >= 0)
6446 	{
6447 	  new = make_compound_operation (XEXP (x, 0), next_code);
6448 	  new = gen_rtx_MULT (mode, new,
6449 			      GEN_INT ((HOST_WIDE_INT) 1
6450 				       << INTVAL (XEXP (x, 1))));
6451 	}
6452       break;
6453 
6454     case AND:
6455       /* If the second operand is not a constant, we can't do anything
6456 	 with it.  */
6457       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6458 	break;
6459 
6460       /* If the constant is a power of two minus one and the first operand
6461 	 is a logical right shift, make an extraction.  */
6462       if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6463 	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6464 	{
6465 	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6466 	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6467 				 0, in_code == COMPARE);
6468 	}
6469 
6470       /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
6471       else if (GET_CODE (XEXP (x, 0)) == SUBREG
6472 	       && subreg_lowpart_p (XEXP (x, 0))
6473 	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6474 	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6475 	{
6476 	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6477 					 next_code);
6478 	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6479 				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6480 				 0, in_code == COMPARE);
6481 	}
6482       /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
6483       else if ((GET_CODE (XEXP (x, 0)) == XOR
6484 		|| GET_CODE (XEXP (x, 0)) == IOR)
6485 	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6486 	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6487 	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6488 	{
6489 	  /* Apply the distributive law, and then try to make extractions.  */
6490 	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6491 				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6492 					     XEXP (x, 1)),
6493 				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6494 					     XEXP (x, 1)));
6495 	  new = make_compound_operation (new, in_code);
6496 	}
6497 
6498       /* If we have (and (rotate X C) M) and C is larger than the number
6499 	 of bits in M, this is an extraction.  */
6500 
6501       else if (GET_CODE (XEXP (x, 0)) == ROTATE
6502 	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6503 	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6504 	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6505 	{
6506 	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6507 	  new = make_extraction (mode, new,
6508 				 (GET_MODE_BITSIZE (mode)
6509 				  - INTVAL (XEXP (XEXP (x, 0), 1))),
6510 				 NULL_RTX, i, 1, 0, in_code == COMPARE);
6511 	}
6512 
6513       /* On machines without logical shifts, if the operand of the AND is
6514 	 a logical shift and our mask turns off all the propagated sign
6515 	 bits, we can replace the logical shift with an arithmetic shift.  */
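      /* For example (assuming SImode with an ashiftrt but no lshiftrt
	 pattern), (and:SI (lshiftrt:SI X (const_int 24)) (const_int 255))
	 can use (ashiftrt:SI X (const_int 24)) inside the AND, since the
	 mask 0xff discards every copied sign bit.  */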
6516       else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6517 	       && !have_insn_for (LSHIFTRT, mode)
6518 	       && have_insn_for (ASHIFTRT, mode)
6519 	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6520 	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6521 	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6522 	       && mode_width <= HOST_BITS_PER_WIDE_INT)
6523 	{
6524 	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6525 
6526 	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6527 	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6528 	    SUBST (XEXP (x, 0),
6529 		   gen_rtx_ASHIFTRT (mode,
6530 				     make_compound_operation
6531 				     (XEXP (XEXP (x, 0), 0), next_code),
6532 				     XEXP (XEXP (x, 0), 1)));
6533 	}
6534 
6535       /* If the constant is one less than a power of two, this might be
6536 	 representable by an extraction even if no shift is present.
6537 	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6538 	 we are in a COMPARE.  */
6539       else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6540 	new = make_extraction (mode,
6541 			       make_compound_operation (XEXP (x, 0),
6542 							next_code),
6543 			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6544 
6545       /* If we are in a comparison and this is an AND with a power of two,
6546 	 convert this into the appropriate bit extract.  */
6547       else if (in_code == COMPARE
6548 	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6549 	new = make_extraction (mode,
6550 			       make_compound_operation (XEXP (x, 0),
6551 							next_code),
6552 			       i, NULL_RTX, 1, 1, 0, 1);
6553 
6554       break;
6555 
6556     case LSHIFTRT:
6557       /* If the sign bit is known to be zero, replace this with an
6558 	 arithmetic shift.  */
6559       if (have_insn_for (ASHIFTRT, mode)
6560 	  && ! have_insn_for (LSHIFTRT, mode)
6561 	  && mode_width <= HOST_BITS_PER_WIDE_INT
6562 	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6563 	{
6564 	  new = gen_rtx_ASHIFTRT (mode,
6565 				  make_compound_operation (XEXP (x, 0),
6566 							   next_code),
6567 				  XEXP (x, 1));
6568 	  break;
6569 	}
6570 
6571       /* ... fall through ...  */
6572 
6573     case ASHIFTRT:
6574       lhs = XEXP (x, 0);
6575       rhs = XEXP (x, 1);
6576 
6577       /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6578 	 this is a SIGN_EXTRACT.  */
6579       if (GET_CODE (rhs) == CONST_INT
6580 	  && GET_CODE (lhs) == ASHIFT
6581 	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6582 	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6583 	{
6584 	  new = make_compound_operation (XEXP (lhs, 0), next_code);
6585 	  new = make_extraction (mode, new,
6586 				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6587 				 NULL_RTX, mode_width - INTVAL (rhs),
6588 				 code == LSHIFTRT, 0, in_code == COMPARE);
6589 	  break;
6590 	}
6591 
6592       /* See if we have operations between an ASHIFTRT and an ASHIFT.
6593 	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6594 	 also do this for some cases of SIGN_EXTRACT, but it doesn't
6595 	 seem worth the effort; the case checked for occurs on Alpha.  */
6596 
6597       if (!OBJECT_P (lhs)
6598 	  && ! (GET_CODE (lhs) == SUBREG
6599 		&& (OBJECT_P (SUBREG_REG (lhs))))
6600 	  && GET_CODE (rhs) == CONST_INT
6601 	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6602 	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6603 	new = make_extraction (mode, make_compound_operation (new, next_code),
6604 			       0, NULL_RTX, mode_width - INTVAL (rhs),
6605 			       code == LSHIFTRT, 0, in_code == COMPARE);
6606 
6607       break;
6608 
6609     case SUBREG:
6610       /* Call ourselves recursively on the inner expression.  If we are
6611 	 narrowing the object and it has a different RTL code from
6612 	 what it originally did, do this SUBREG as a force_to_mode.  */
6613 
6614       tem = make_compound_operation (SUBREG_REG (x), in_code);
6615 
6616       {
6617 	rtx simplified;
6618 	simplified = simplify_subreg (GET_MODE (x), tem, GET_MODE (tem),
6619 				      SUBREG_BYTE (x));
6620 
6621 	if (simplified)
6622 	  tem = simplified;
6623 
6624 	if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6625 	    && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6626 	    && subreg_lowpart_p (x))
6627 	  {
6628 	    rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6629 				       0);
6630 
6631 	    /* If we have something other than a SUBREG, we might have
6632 	       done an expansion, so rerun ourselves.  */
6633 	    if (GET_CODE (newer) != SUBREG)
6634 	      newer = make_compound_operation (newer, in_code);
6635 
6636 	    return newer;
6637 	  }
6638 
6639 	if (simplified)
6640 	  return tem;
6641       }
6642       break;
6643 
6644     default:
6645       break;
6646     }
6647 
6648   if (new)
6649     {
6650       x = gen_lowpart (mode, new);
6651       code = GET_CODE (x);
6652     }
6653 
6654   /* Now recursively process each operand of this operation.  */
6655   fmt = GET_RTX_FORMAT (code);
6656   for (i = 0; i < GET_RTX_LENGTH (code); i++)
6657     if (fmt[i] == 'e')
6658       {
6659 	new = make_compound_operation (XEXP (x, i), next_code);
6660 	SUBST (XEXP (x, i), new);
6661       }
6662 
6663   /* If this is a commutative operation, the changes to the operands
6664      may have made it noncanonical.  */
6665   if (COMMUTATIVE_ARITH_P (x)
6666       && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
6667     {
6668       tem = XEXP (x, 0);
6669       SUBST (XEXP (x, 0), XEXP (x, 1));
6670       SUBST (XEXP (x, 1), tem);
6671     }
6672 
6673   return x;
6674 }
6675 
6676 /* Given M see if it is a value that would select a field of bits
6677    within an item, but not the entire word.  Return -1 if not.
6678    Otherwise, return the starting position of the field, where 0 is the
6679    low-order bit.
6680 
6681    *PLEN is set to the length of the field.  */
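/* For example, M == 0x70 (binary 0111 0000) describes a 3-bit field
   starting at bit 4, so we return 4 and set *PLEN to 3, while a
   non-contiguous value such as M == 0x50 yields -1.  */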
6682 
6683 static int
6684 get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
6685 {
6686   /* Get the bit number of the first 1 bit from the right, -1 if none.  */
6687   int pos = exact_log2 (m & -m);
6688   int len = 0;
6689 
6690   if (pos >= 0)
6691     /* Now shift off the low-order zero bits and see if we have a
6692        power of two minus 1.  */
6693     len = exact_log2 ((m >> pos) + 1);
6694 
6695   if (len <= 0)
6696     pos = -1;
6697 
6698   *plen = len;
6699   return pos;
6700 }
6701 
6702 /* If X refers to a register that equals REG in value, replace these
6703    references with REG.  */
6704 static rtx
6705 canon_reg_for_combine (rtx x, rtx reg)
6706 {
6707   rtx op0, op1, op2;
6708   const char *fmt;
6709   int i;
6710   bool copied;
6711 
6712   enum rtx_code code = GET_CODE (x);
6713   switch (GET_RTX_CLASS (code))
6714     {
6715     case RTX_UNARY:
6716       op0 = canon_reg_for_combine (XEXP (x, 0), reg);
6717       if (op0 != XEXP (x, 0))
6718 	return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
6719 				   GET_MODE (reg));
6720       break;
6721 
6722     case RTX_BIN_ARITH:
6723     case RTX_COMM_ARITH:
6724       op0 = canon_reg_for_combine (XEXP (x, 0), reg);
6725       op1 = canon_reg_for_combine (XEXP (x, 1), reg);
6726       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6727 	return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
6728       break;
6729 
6730     case RTX_COMPARE:
6731     case RTX_COMM_COMPARE:
6732       op0 = canon_reg_for_combine (XEXP (x, 0), reg);
6733       op1 = canon_reg_for_combine (XEXP (x, 1), reg);
6734       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6735 	return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
6736 					GET_MODE (op0), op0, op1);
6737       break;
6738 
6739     case RTX_TERNARY:
6740     case RTX_BITFIELD_OPS:
6741       op0 = canon_reg_for_combine (XEXP (x, 0), reg);
6742       op1 = canon_reg_for_combine (XEXP (x, 1), reg);
6743       op2 = canon_reg_for_combine (XEXP (x, 2), reg);
6744       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
6745 	return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
6746 				     GET_MODE (op0), op0, op1, op2);
6747 
6748     case RTX_OBJ:
6749       if (REG_P (x))
6750 	{
6751 	  if (rtx_equal_p (get_last_value (reg), x)
6752 	      || rtx_equal_p (reg, get_last_value (x)))
6753 	    return reg;
6754 	  else
6755 	    break;
6756 	}
6757 
6758       /* fall through */
6759 
6760     default:
6761       fmt = GET_RTX_FORMAT (code);
6762       copied = false;
6763       for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6764 	if (fmt[i] == 'e')
6765 	  {
6766 	    rtx op = canon_reg_for_combine (XEXP (x, i), reg);
6767 	    if (op != XEXP (x, i))
6768 	      {
6769 		if (!copied)
6770 		  {
6771 		    copied = true;
6772 		    x = copy_rtx (x);
6773 		  }
6774 		XEXP (x, i) = op;
6775 	      }
6776 	  }
6777 	else if (fmt[i] == 'E')
6778 	  {
6779 	    int j;
6780 	    for (j = 0; j < XVECLEN (x, i); j++)
6781 	      {
6782 		rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
6783 		if (op != XVECEXP (x, i, j))
6784 		  {
6785 		    if (!copied)
6786 		      {
6787 			copied = true;
6788 			x = copy_rtx (x);
6789 		      }
6790 		    XVECEXP (x, i, j) = op;
6791 		  }
6792 	      }
6793 	  }
6794 
6795       break;
6796     }
6797 
6798   return x;
6799 }
6800 
6801 /* Return X converted to MODE.  If the value is already truncated to
6802    MODE we can just return a subreg even though in the general case we
6803    would need an explicit truncation.  */
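/* For instance, narrowing (reg:SI r) to QImode gives (subreg:QI (reg:SI r) ...)
   when QImode truncation is a no-op on the target or the register is known
   to be truncated already; otherwise an explicit (truncate:QI (reg:SI r))
   is generated.  */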
6804 
6805 static rtx
6806 gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
6807 {
6808   if (GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (mode)
6809       || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
6810 				GET_MODE_BITSIZE (GET_MODE (x)))
6811       || (REG_P (x) && reg_truncated_to_mode (mode, x)))
6812     return gen_lowpart (mode, x);
6813   else
6814     return simplify_gen_unary (TRUNCATE, mode, x, GET_MODE (x));
6815 }
6816 
6817 /* See if X can be simplified knowing that we will only refer to it in
6818    MODE and will only refer to those bits that are nonzero in MASK.
6819    If other bits are being computed or if masking operations are done
6820    that select a superset of the bits in MASK, they can sometimes be
6821    ignored.
6822 
6823    Return a possibly simplified expression, but always convert X to
6824    MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
6825 
6826    If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6827    are all off in X.  This is used when X will be complemented, by either
6828    NOT, NEG, or XOR.  */
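/* A hedged example: forcing (plus:SI (reg:SI r) (const_int 256)) to SImode
   with MASK == 0xff can drop the addition entirely, since the constant has
   no bits in common with the mask; the result is simply (reg:SI r).  */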
6829 
6830 static rtx
6831 force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
6832 	       int just_select)
6833 {
6834   enum rtx_code code = GET_CODE (x);
6835   int next_select = just_select || code == XOR || code == NOT || code == NEG;
6836   enum machine_mode op_mode;
6837   unsigned HOST_WIDE_INT fuller_mask, nonzero;
6838   rtx op0, op1, temp;
6839 
6840   /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
6841      code below will do the wrong thing since the mode of such an
6842      expression is VOIDmode.
6843 
6844      Also do nothing if X is a CLOBBER; this can happen if X was
6845      the return value from a call to gen_lowpart.  */
6846   if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6847     return x;
6848 
6849   /* We want to perform the operation in its present mode unless we know
6850      that the operation is valid in MODE, in which case we do the operation
6851      in MODE.  */
6852   op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6853 	      && have_insn_for (code, mode))
6854 	     ? mode : GET_MODE (x));
6855 
6856   /* It is not valid to do a right-shift in a narrower mode
6857      than the one it came in with.  */
6858   if ((code == LSHIFTRT || code == ASHIFTRT)
6859       && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6860     op_mode = GET_MODE (x);
6861 
6862   /* Truncate MASK to fit OP_MODE.  */
6863   if (op_mode)
6864     mask &= GET_MODE_MASK (op_mode);
6865 
6866   /* When we have an arithmetic operation, or a shift whose count we
6867      do not know, we need to assume that all bits up to the highest-order
6868      bit in MASK will be needed.  This is how we form such a mask.  */
6869   if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
6870     fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
6871   else
6872     fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6873 		   - 1);
6874 
6875   /* Determine what bits of X are guaranteed to be (non)zero.  */
6876   nonzero = nonzero_bits (x, mode);
6877 
6878   /* If none of the bits in X are needed, return a zero.  */
6879   if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
6880     x = const0_rtx;
6881 
6882   /* If X is a CONST_INT, return a new one.  Do this here since the
6883      test below will fail.  */
6884   if (GET_CODE (x) == CONST_INT)
6885     {
6886       if (SCALAR_INT_MODE_P (mode))
6887 	return gen_int_mode (INTVAL (x) & mask, mode);
6888       else
6889 	{
6890 	  x = GEN_INT (INTVAL (x) & mask);
6891 	  return gen_lowpart_common (mode, x);
6892 	}
6893     }
6894 
6895   /* If X is narrower than MODE and we want all the bits in X's mode, just
6896      get X in the proper mode.  */
6897   if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6898       && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6899     return gen_lowpart (mode, x);
6900 
6901   switch (code)
6902     {
6903     case CLOBBER:
6904       /* If X is a (clobber (const_int)), return it since we know we are
6905 	 generating something that won't match.  */
6906       return x;
6907 
6908     case SIGN_EXTEND:
6909     case ZERO_EXTEND:
6910     case ZERO_EXTRACT:
6911     case SIGN_EXTRACT:
6912       x = expand_compound_operation (x);
6913       if (GET_CODE (x) != code)
6914 	return force_to_mode (x, mode, mask, next_select);
6915       break;
6916 
6917     case SUBREG:
6918       if (subreg_lowpart_p (x)
6919 	  /* We can ignore the effect of this SUBREG if it narrows the mode or
6920 	     if the constant masks to zero all the bits the mode doesn't
6921 	     have.  */
6922 	  && ((GET_MODE_SIZE (GET_MODE (x))
6923 	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6924 	      || (0 == (mask
6925 			& GET_MODE_MASK (GET_MODE (x))
6926 			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6927 	return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
6928       break;
6929 
6930     case AND:
6931       /* If this is an AND with a constant, convert it into an AND
6932 	 whose constant is the AND of that constant with MASK.  If it
6933 	 remains an AND of MASK, delete it since it is redundant.  */
6934 
6935       if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6936 	{
6937 	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6938 				      mask & INTVAL (XEXP (x, 1)));
6939 
6940 	  /* If X is still an AND, see if it is an AND with a mask that
6941 	     is just some low-order bits.  If so, and it is MASK, we don't
6942 	     need it.  */
6943 
6944 	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6945 	      && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
6946 		  == mask))
6947 	    x = XEXP (x, 0);
6948 
6949 	  /* If it remains an AND, try making another AND with the bits
6950 	     in the mode mask that aren't in MASK turned on.  If the
6951 	     constant in the AND is wide enough, this might make a
6952 	     cheaper constant.  */
6953 
6954 	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6955 	      && GET_MODE_MASK (GET_MODE (x)) != mask
6956 	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6957 	    {
6958 	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6959 				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
6960 	      int width = GET_MODE_BITSIZE (GET_MODE (x));
6961 	      rtx y;
6962 
6963 	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6964 		 number, sign extend it.  */
6965 	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6966 		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6967 		cval |= (HOST_WIDE_INT) -1 << width;
6968 
6969 	      y = simplify_gen_binary (AND, GET_MODE (x),
6970 				       XEXP (x, 0), GEN_INT (cval));
6971 	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
6972 		x = y;
6973 	    }
6974 
6975 	  break;
6976 	}
6977 
6978       goto binop;
6979 
6980     case PLUS:
6981       /* In (and (plus FOO C1) M), if M is a mask that just turns off
6982 	 low-order bits (as in an alignment operation) and FOO is already
6983 	 aligned to that boundary, mask C1 to that boundary as well.
6984 	 This may eliminate that PLUS and, later, the AND.  */
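      /* For instance (values assumed), in

	   (and:SI (plus:SI (reg:SI r) (const_int 0x1007)) (const_int -8))

	 with R known to be 8-byte aligned, C1 can be masked down to 0x1000,
	 because the low three bits of the sum cannot survive the outer AND
	 anyway.  */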
6985 
6986       {
6987 	unsigned int width = GET_MODE_BITSIZE (mode);
6988 	unsigned HOST_WIDE_INT smask = mask;
6989 
6990 	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6991 	   number, sign extend it.  */
6992 
6993 	if (width < HOST_BITS_PER_WIDE_INT
6994 	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6995 	  smask |= (HOST_WIDE_INT) -1 << width;
6996 
6997 	if (GET_CODE (XEXP (x, 1)) == CONST_INT
6998 	    && exact_log2 (- smask) >= 0
6999 	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7000 	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7001 	  return force_to_mode (plus_constant (XEXP (x, 0),
7002 					       (INTVAL (XEXP (x, 1)) & smask)),
7003 				mode, smask, next_select);
7004       }
7005 
7006       /* ... fall through ...  */
7007 
7008     case MULT:
7009       /* For PLUS, MINUS and MULT, we need any bits less significant than the
7010 	 most significant bit in MASK since carries from those bits will
7011 	 affect the bits we are interested in.  */
7012       mask = fuller_mask;
7013       goto binop;
7014 
7015     case MINUS:
7016       /* If X is (minus C Y) where C's least set bit is larger than any bit
7017 	 in the mask, then we may replace with (neg Y).  */
7018       if (GET_CODE (XEXP (x, 0)) == CONST_INT
7019 	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7020 					& -INTVAL (XEXP (x, 0))))
7021 	      > mask))
7022 	{
7023 	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7024 				  GET_MODE (x));
7025 	  return force_to_mode (x, mode, mask, next_select);
7026 	}
7027 
7028       /* Similarly, if C contains every bit in the fuller_mask, then we may
7029 	 replace with (not Y).  */
7030       if (GET_CODE (XEXP (x, 0)) == CONST_INT
7031 	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
7032 	      == INTVAL (XEXP (x, 0))))
7033 	{
7034 	  x = simplify_gen_unary (NOT, GET_MODE (x),
7035 				  XEXP (x, 1), GET_MODE (x));
7036 	  return force_to_mode (x, mode, mask, next_select);
7037 	}
7038 
7039       mask = fuller_mask;
7040       goto binop;
7041 
7042     case IOR:
7043     case XOR:
7044       /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7045 	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7046 	 operation which may be a bitfield extraction.  Ensure that the
7047 	 constant we form is not wider than the mode of X.  */
7048 
7049       if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7050 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7051 	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7052 	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7053 	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7054 	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
7055 	       + floor_log2 (INTVAL (XEXP (x, 1))))
7056 	      < GET_MODE_BITSIZE (GET_MODE (x)))
7057 	  && (INTVAL (XEXP (x, 1))
7058 	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
7059 	{
7060 	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
7061 			  << INTVAL (XEXP (XEXP (x, 0), 1)));
7062 	  temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
7063 				      XEXP (XEXP (x, 0), 0), temp);
7064 	  x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
7065 				   XEXP (XEXP (x, 0), 1));
7066 	  return force_to_mode (x, mode, mask, next_select);
7067 	}
7068 
7069     binop:
7070       /* For most binary operations, just propagate into the operation and
7071 	 change the mode if we have an operation of that mode.  */
7072 
7073       op0 = gen_lowpart_or_truncate (op_mode,
7074 				     force_to_mode (XEXP (x, 0), mode, mask,
7075 						    next_select));
7076       op1 = gen_lowpart_or_truncate (op_mode,
7077 				     force_to_mode (XEXP (x, 1), mode, mask,
7078 					next_select));
7079 
7080       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7081 	x = simplify_gen_binary (code, op_mode, op0, op1);
7082       break;
7083 
7084     case ASHIFT:
7085       /* For left shifts, do the same, but just for the first operand.
7086 	 However, we cannot do anything with shifts where we cannot
7087 	 guarantee that the counts are smaller than the size of the mode
7088 	 because such a count will have a different meaning in a
7089 	 wider mode.  */
7090 
7091       if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
7092 	     && INTVAL (XEXP (x, 1)) >= 0
7093 	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7094 	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7095 		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7096 		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7097 	break;
7098 
7099       /* If the shift count is a constant and we can do arithmetic in
7100 	 the mode of the shift, refine which bits we need.  Otherwise, use the
7101 	 conservative form of the mask.  */
7102       if (GET_CODE (XEXP (x, 1)) == CONST_INT
7103 	  && INTVAL (XEXP (x, 1)) >= 0
7104 	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7105 	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7106 	mask >>= INTVAL (XEXP (x, 1));
7107       else
7108 	mask = fuller_mask;
7109 
7110       op0 = gen_lowpart_or_truncate (op_mode,
7111 				     force_to_mode (XEXP (x, 0), op_mode,
7112 						    mask, next_select));
7113 
7114       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7115 	x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
7116       break;
7117 
7118     case LSHIFTRT:
7119       /* Here we can only do something if the shift count is a constant,
7120 	 this shift constant is valid for the host, and we can do arithmetic
7121 	 in OP_MODE.  */
7122 
7123       if (GET_CODE (XEXP (x, 1)) == CONST_INT
7124 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7125 	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7126 	{
7127 	  rtx inner = XEXP (x, 0);
7128 	  unsigned HOST_WIDE_INT inner_mask;
7129 
7130 	  /* Select the mask of the bits we need for the shift operand.  */
7131 	  inner_mask = mask << INTVAL (XEXP (x, 1));
7132 
7133 	  /* We can only change the mode of the shift if we can do arithmetic
7134 	     in the mode of the shift and INNER_MASK is no wider than the
7135 	     width of X's mode.  */
7136 	  if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
7137 	    op_mode = GET_MODE (x);
7138 
7139 	  inner = force_to_mode (inner, op_mode, inner_mask, next_select);
7140 
7141 	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7142 	    x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7143 	}
7144 
7145       /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7146 	 shift and AND produces only copies of the sign bit (C2 is one less
7147 	 than a power of two), we can do this with just a shift.  */
7148 
7149       if (GET_CODE (x) == LSHIFTRT
7150 	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7151 	  /* The shift puts one of the sign bit copies in the least significant
7152 	     bit.  */
7153 	  && ((INTVAL (XEXP (x, 1))
7154 	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7155 	      >= GET_MODE_BITSIZE (GET_MODE (x)))
7156 	  && exact_log2 (mask + 1) >= 0
7157 	  /* Number of bits left after the shift must be more than the mask
7158 	     needs.  */
7159 	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7160 	      <= GET_MODE_BITSIZE (GET_MODE (x)))
7161 	  /* Must be more sign bit copies than the mask needs.  */
7162 	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7163 	      >= exact_log2 (mask + 1)))
7164 	x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7165 				 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7166 					  - exact_log2 (mask + 1)));
7167 
7168       goto shiftrt;
7169 
7170     case ASHIFTRT:
7171       /* If we are just looking for the sign bit, we don't need this shift at
7172 	 all, even if it has a variable count.  */
7173       if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7174 	  && (mask == ((unsigned HOST_WIDE_INT) 1
7175 		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7176 	return force_to_mode (XEXP (x, 0), mode, mask, next_select);
7177 
7178       /* If this is a shift by a constant, get a mask that contains those bits
7179 	 that are not copies of the sign bit.  We then have two cases:  If
7180 	 MASK only includes those bits, this can be a logical shift, which may
7181 	 allow simplifications.  If MASK is a single-bit field not within
7182 	 those bits, we are requesting a copy of the sign bit and hence can
7183 	 shift the sign bit to the appropriate location.  */
7184 
7185       if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7186 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7187 	{
7188 	  int i;
7189 
7190 	  /* If the considered data is wider than HOST_WIDE_INT, we can't
7191 	     represent a mask for all its bits in a single scalar.
7192 	     But we only care about the lower bits, so calculate these.  */
7193 
7194 	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7195 	    {
7196 	      nonzero = ~(HOST_WIDE_INT) 0;
7197 
7198 	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7199 		 is the number of bits a full-width mask would have set.
7200 		 We need only shift if these are fewer than nonzero can
7201 		 hold.  If not, we must keep all bits set in nonzero.  */
7202 
7203 	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7204 		  < HOST_BITS_PER_WIDE_INT)
7205 		nonzero >>= INTVAL (XEXP (x, 1))
7206 			    + HOST_BITS_PER_WIDE_INT
7207 			    - GET_MODE_BITSIZE (GET_MODE (x)) ;
7208 	    }
7209 	  else
7210 	    {
7211 	      nonzero = GET_MODE_MASK (GET_MODE (x));
7212 	      nonzero >>= INTVAL (XEXP (x, 1));
7213 	    }
7214 
7215 	  if ((mask & ~nonzero) == 0)
7216 	    {
7217 	      x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
7218 					XEXP (x, 0), INTVAL (XEXP (x, 1)));
7219 	      if (GET_CODE (x) != ASHIFTRT)
7220 		return force_to_mode (x, mode, mask, next_select);
7221 	    }
7222 
7223 	  else if ((i = exact_log2 (mask)) >= 0)
7224 	    {
7225 	      x = simplify_shift_const
7226 		  (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7227 		   GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7228 
7229 	      if (GET_CODE (x) != ASHIFTRT)
7230 		return force_to_mode (x, mode, mask, next_select);
7231 	    }
7232 	}
7233 
7234       /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
7235 	 even if the shift count isn't a constant.  */
7236       if (mask == 1)
7237 	x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7238 				 XEXP (x, 0), XEXP (x, 1));
7239 
7240     shiftrt:
7241 
7242       /* If this is a zero- or sign-extension operation that just affects bits
7243 	 we don't care about, remove it.  Be sure the call above returned
7244 	 something that is still a shift.  */
7245 
7246       if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7247 	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7248 	  && INTVAL (XEXP (x, 1)) >= 0
7249 	  && (INTVAL (XEXP (x, 1))
7250 	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7251 	  && GET_CODE (XEXP (x, 0)) == ASHIFT
7252 	  && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
7253 	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7254 			      next_select);
7255 
7256       break;
7257 
7258     case ROTATE:
7259     case ROTATERT:
7260       /* If the shift count is constant and we can do computations
7261 	 in the mode of X, compute where the bits we care about are.
7262 	 Otherwise, we can't do anything.  Don't change the mode of
7263 	 the shift or propagate MODE into the shift, though.  */
7264       if (GET_CODE (XEXP (x, 1)) == CONST_INT
7265 	  && INTVAL (XEXP (x, 1)) >= 0)
7266 	{
7267 	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7268 					    GET_MODE (x), GEN_INT (mask),
7269 					    XEXP (x, 1));
7270 	  if (temp && GET_CODE (temp) == CONST_INT)
7271 	    SUBST (XEXP (x, 0),
7272 		   force_to_mode (XEXP (x, 0), GET_MODE (x),
7273 				  INTVAL (temp), next_select));
7274 	}
7275       break;
7276 
7277     case NEG:
7278       /* If we just want the low-order bit, the NEG isn't needed since it
7279 	 won't change the low-order bit.  */
7280       if (mask == 1)
7281 	return force_to_mode (XEXP (x, 0), mode, mask, just_select);
7282 
7283       /* We need any bits less significant than the most significant bit in
7284 	 MASK since carries from those bits will affect the bits we are
7285 	 interested in.  */
7286       mask = fuller_mask;
7287       goto unop;
7288 
7289     case NOT:
7290       /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7291 	 same as the XOR case above.  Ensure that the constant we form is not
7292 	 wider than the mode of X.  */
7293 
7294       if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7295 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7296 	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7297 	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7298 	      < GET_MODE_BITSIZE (GET_MODE (x)))
7299 	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7300 	{
7301 	  temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
7302 			       GET_MODE (x));
7303 	  temp = simplify_gen_binary (XOR, GET_MODE (x),
7304 				      XEXP (XEXP (x, 0), 0), temp);
7305 	  x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7306 				   temp, XEXP (XEXP (x, 0), 1));
7307 
7308 	  return force_to_mode (x, mode, mask, next_select);
7309 	}
7310 
7311       /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7312 	 use the full mask inside the NOT.  */
7313       mask = fuller_mask;
7314 
7315     unop:
7316       op0 = gen_lowpart_or_truncate (op_mode,
7317 				     force_to_mode (XEXP (x, 0), mode, mask,
7318 						    next_select));
7319       if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7320 	x = simplify_gen_unary (code, op_mode, op0, op_mode);
7321       break;
7322 
7323     case NE:
7324       /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7325 	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7326 	 which is equal to STORE_FLAG_VALUE.  */
7327       if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7328 	  && GET_MODE (XEXP (x, 0)) == mode
7329 	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7330 	  && (nonzero_bits (XEXP (x, 0), mode)
7331 	      == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
7332 	return force_to_mode (XEXP (x, 0), mode, mask, next_select);
7333 
7334       break;
7335 
7336     case IF_THEN_ELSE:
7337       /* We have no way of knowing if the IF_THEN_ELSE can itself be
7338 	 written in a narrower mode.  We play it safe and do not do so.  */
7339 
7340       SUBST (XEXP (x, 1),
7341 	     gen_lowpart_or_truncate (GET_MODE (x),
7342 				      force_to_mode (XEXP (x, 1), mode,
7343 						     mask, next_select)));
7344       SUBST (XEXP (x, 2),
7345 	     gen_lowpart_or_truncate (GET_MODE (x),
7346 				      force_to_mode (XEXP (x, 2), mode,
7347 						     mask, next_select)));
7348       break;
7349 
7350     default:
7351       break;
7352     }
7353 
7354   /* Ensure we return a value of the proper mode.  */
7355   return gen_lowpart_or_truncate (mode, x);
7356 }
7357 
7358 /* Return nonzero if X is an expression that has one of two values depending on
7359    whether some other value is zero or nonzero.  In that case, we return the
7360    value that is being tested, *PTRUE is set to the value of X when the rtx
7361    being returned is nonzero, and *PFALSE is set to the other alternative.
7362 
7363    If we return zero, we set *PTRUE and *PFALSE to X.  */
7364 
7365 static rtx
7366 if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
7367 {
7368   enum machine_mode mode = GET_MODE (x);
7369   enum rtx_code code = GET_CODE (x);
7370   rtx cond0, cond1, true0, true1, false0, false1;
7371   unsigned HOST_WIDE_INT nz;
7372 
7373   /* If we are comparing a value against zero, we are done.  */
7374   if ((code == NE || code == EQ)
7375       && XEXP (x, 1) == const0_rtx)
7376     {
7377       *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7378       *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7379       return XEXP (x, 0);
7380     }
7381 
7382   /* If this is a unary operation whose operand has one of two values, apply
7383      our opcode to compute those values.  */
7384   else if (UNARY_P (x)
7385 	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7386     {
7387       *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7388       *pfalse = simplify_gen_unary (code, mode, false0,
7389 				    GET_MODE (XEXP (x, 0)));
7390       return cond0;
7391     }
7392 
7393   /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7394      make can't possibly match and would suppress other optimizations.  */
7395   else if (code == COMPARE)
7396     ;
7397 
7398   /* If this is a binary operation, see if either side has only one of two
7399      values.  If either one does or if both do and they are conditional on
7400      the same value, compute the new true and false values.  */
7401   else if (BINARY_P (x))
7402     {
7403       cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7404       cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7405 
7406       if ((cond0 != 0 || cond1 != 0)
7407 	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7408 	{
7409 	  /* If if_then_else_cond returned zero, then true/false are the
7410 	     same rtl.  We must copy one of them to prevent invalid rtl
7411 	     sharing.  */
7412 	  if (cond0 == 0)
7413 	    true0 = copy_rtx (true0);
7414 	  else if (cond1 == 0)
7415 	    true1 = copy_rtx (true1);
7416 
7417 	  if (COMPARISON_P (x))
7418 	    {
7419 	      *ptrue = simplify_gen_relational (code, mode, VOIDmode,
7420 						true0, true1);
7421 	      *pfalse = simplify_gen_relational (code, mode, VOIDmode,
7422 						 false0, false1);
7423 	     }
7424 	  else
7425 	    {
7426 	      *ptrue = simplify_gen_binary (code, mode, true0, true1);
7427 	      *pfalse = simplify_gen_binary (code, mode, false0, false1);
7428 	    }
7429 
7430 	  return cond0 ? cond0 : cond1;
7431 	}
7432 
7433       /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7434 	 operands is zero when the other is nonzero, and vice-versa,
7435 	 and STORE_FLAG_VALUE is 1 or -1.  */
7436 
7437       if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7438 	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
7439 	      || code == UMAX)
7440 	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7441 	{
7442 	  rtx op0 = XEXP (XEXP (x, 0), 1);
7443 	  rtx op1 = XEXP (XEXP (x, 1), 1);
7444 
7445 	  cond0 = XEXP (XEXP (x, 0), 0);
7446 	  cond1 = XEXP (XEXP (x, 1), 0);
7447 
7448 	  if (COMPARISON_P (cond0)
7449 	      && COMPARISON_P (cond1)
7450 	      && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
7451 		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7452 		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7453 		  || ((swap_condition (GET_CODE (cond0))
7454 		       == reversed_comparison_code (cond1, NULL))
7455 		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7456 		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7457 	      && ! side_effects_p (x))
7458 	    {
7459 	      *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
7460 	      *pfalse = simplify_gen_binary (MULT, mode,
7461 					     (code == MINUS
7462 					      ? simplify_gen_unary (NEG, mode,
7463 								    op1, mode)
7464 					      : op1),
7465 					      const_true_rtx);
7466 	      return cond0;
7467 	    }
7468 	}
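      /* A concrete illustration of the case above (operands chosen for
	 exposition): with STORE_FLAG_VALUE == 1,
	 (plus (mult (ge A B) C) (mult (lt A B) D)) evaluates to C when
	 (ge A B) holds and to D otherwise, so we return (ge A B) with
	 *PTRUE set to (mult C const_true_rtx) and *PFALSE set to
	 (mult D const_true_rtx).  */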
7469 
7470       /* Similarly for MULT, AND and UMIN, except that for these the result
7471 	 is always zero.  */
7472       if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7473 	  && (code == MULT || code == AND || code == UMIN)
7474 	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7475 	{
7476 	  cond0 = XEXP (XEXP (x, 0), 0);
7477 	  cond1 = XEXP (XEXP (x, 1), 0);
7478 
7479 	  if (COMPARISON_P (cond0)
7480 	      && COMPARISON_P (cond1)
7481 	      && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
7482 		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7483 		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7484 		  || ((swap_condition (GET_CODE (cond0))
7485 		       == reversed_comparison_code (cond1, NULL))
7486 		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7487 		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7488 	      && ! side_effects_p (x))
7489 	    {
7490 	      *ptrue = *pfalse = const0_rtx;
7491 	      return cond0;
7492 	    }
7493 	}
7494     }
7495 
7496   else if (code == IF_THEN_ELSE)
7497     {
7498       /* If we have IF_THEN_ELSE already, extract the condition and
7499 	 canonicalize it if it is NE or EQ.  */
7500       cond0 = XEXP (x, 0);
7501       *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7502       if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7503 	return XEXP (cond0, 0);
7504       else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7505 	{
7506 	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7507 	  return XEXP (cond0, 0);
7508 	}
7509       else
7510 	return cond0;
7511     }
7512 
7513   /* If X is a SUBREG, we can narrow both the true and false values
7514      of the inner expression, if there is a condition.  */
7515   else if (code == SUBREG
7516 	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7517 					       &true0, &false0)))
7518     {
7519       true0 = simplify_gen_subreg (mode, true0,
7520 				   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7521       false0 = simplify_gen_subreg (mode, false0,
7522 				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7523       if (true0 && false0)
7524 	{
7525 	  *ptrue = true0;
7526 	  *pfalse = false0;
7527 	  return cond0;
7528 	}
7529     }
7530 
7531   /* If X is a constant, this isn't special and will cause confusion
7532      if we treat it as such.  Likewise if it is equivalent to a constant.  */
7533   else if (CONSTANT_P (x)
7534 	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7535     ;
7536 
7537   /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7538      will be least confusing to the rest of the compiler.  */
7539   else if (mode == BImode)
7540     {
7541       *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7542       return x;
7543     }
7544 
7545   /* If X is known to be either 0 or -1, those are the true and
7546      false values when testing X.  */
7547   else if (x == constm1_rtx || x == const0_rtx
7548 	   || (mode != VOIDmode
7549 	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7550     {
7551       *ptrue = constm1_rtx, *pfalse = const0_rtx;
7552       return x;
7553     }
7554 
7555   /* Likewise for 0 or a single bit.  */
7556   else if (SCALAR_INT_MODE_P (mode)
7557 	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7558 	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7559     {
7560       *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7561       return x;
7562     }
7563 
7564   /* Otherwise fail; show no condition with true and false values the same.  */
7565   *ptrue = *pfalse = x;
7566   return 0;
7567 }
7568 
7569 /* Return the value of expression X given the fact that condition COND
7570    is known to be true when applied to REG as its first operand and VAL
7571    as its second.  X is known to not be shared and so can be modified in
7572    place.
7573 
7574    We only handle the simplest cases, and specifically those cases that
7575    arise with IF_THEN_ELSE expressions.  */
7576 
7577 static rtx
7578 known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
7579 {
7580   enum rtx_code code = GET_CODE (x);
7581   rtx temp;
7582   const char *fmt;
7583   int i, j;
7584 
7585   if (side_effects_p (x))
7586     return x;
7587 
7588   /* If either operand of the condition is a floating point value,
7589      then we have to avoid collapsing an EQ comparison.  */
7590   if (cond == EQ
7591       && rtx_equal_p (x, reg)
7592       && ! FLOAT_MODE_P (GET_MODE (x))
7593       && ! FLOAT_MODE_P (GET_MODE (val)))
7594     return val;
7595 
7596   if (cond == UNEQ && rtx_equal_p (x, reg))
7597     return val;
7598 
7599   /* If X is (abs REG) and we know something about REG's relationship
7600      with zero, we may be able to simplify this.  */
7601 
7602   if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7603     switch (cond)
7604       {
7605       case GE:  case GT:  case EQ:
7606 	return XEXP (x, 0);
7607       case LT:  case LE:
7608 	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7609 				   XEXP (x, 0),
7610 				   GET_MODE (XEXP (x, 0)));
7611       default:
7612 	break;
7613       }
7614 
7615   /* The only other cases we handle are MIN, MAX, and comparisons if the
7616      operands are the same as REG and VAL.  */
7617 
7618   else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
7619     {
7620       if (rtx_equal_p (XEXP (x, 0), val))
7621 	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7622 
7623       if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7624 	{
7625 	  if (COMPARISON_P (x))
7626 	    {
7627 	      if (comparison_dominates_p (cond, code))
7628 		return const_true_rtx;
7629 
7630 	      code = reversed_comparison_code (x, NULL);
7631 	      if (code != UNKNOWN
7632 		  && comparison_dominates_p (cond, code))
7633 		return const0_rtx;
7634 	      else
7635 		return x;
7636 	    }
7637 	  else if (code == SMAX || code == SMIN
7638 		   || code == UMIN || code == UMAX)
7639 	    {
7640 	      int unsignedp = (code == UMIN || code == UMAX);
7641 
7642 	      /* Do not reverse the condition when it is NE or EQ.
7643 		 This is because we cannot conclude anything about
7644 		 the value of 'SMAX (x, y)' when x is not equal to y,
7645 		 but we can when x equals y.  */
7646 	      if ((code == SMAX || code == UMAX)
7647 		  && ! (cond == EQ || cond == NE))
7648 		cond = reverse_condition (cond);
7649 
7650 	      switch (cond)
7651 		{
7652 		case GE:   case GT:
7653 		  return unsignedp ? x : XEXP (x, 1);
7654 		case LE:   case LT:
7655 		  return unsignedp ? x : XEXP (x, 0);
7656 		case GEU:  case GTU:
7657 		  return unsignedp ? XEXP (x, 1) : x;
7658 		case LEU:  case LTU:
7659 		  return unsignedp ? XEXP (x, 0) : x;
7660 		default:
7661 		  break;
7662 		}
7663 	    }
7664 	}
7665     }
7666   else if (code == SUBREG)
7667     {
7668       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7669       rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7670 
7671       if (SUBREG_REG (x) != r)
7672 	{
7673 	  /* We must simplify subreg here, before we lose track of the
7674 	     original inner_mode.  */
7675 	  new = simplify_subreg (GET_MODE (x), r,
7676 				 inner_mode, SUBREG_BYTE (x));
7677 	  if (new)
7678 	    return new;
7679 	  else
7680 	    SUBST (SUBREG_REG (x), r);
7681 	}
7682 
7683       return x;
7684     }
7685   /* We don't have to handle SIGN_EXTEND here, because even in the
7686      case of replacing something with a modeless CONST_INT, a
7687      CONST_INT is already (supposed to be) a valid sign extension for
7688      its narrower mode, which implies it's already properly
7689      sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
7690      story is different.  */
7691   else if (code == ZERO_EXTEND)
7692     {
7693       enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7694       rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7695 
7696       if (XEXP (x, 0) != r)
7697 	{
7698 	  /* We must simplify the zero_extend here, before we lose
7699 	     track of the original inner_mode.  */
7700 	  new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7701 					  r, inner_mode);
7702 	  if (new)
7703 	    return new;
7704 	  else
7705 	    SUBST (XEXP (x, 0), r);
7706 	}
7707 
7708       return x;
7709     }
7710 
7711   fmt = GET_RTX_FORMAT (code);
7712   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7713     {
7714       if (fmt[i] == 'e')
7715 	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7716       else if (fmt[i] == 'E')
7717 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7718 	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7719 						cond, reg, val));
7720     }
7721 
7722   return x;
7723 }
7724 
7725 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
7726    assignment as a field assignment.  */
7727 
7728 static int
7729 rtx_equal_for_field_assignment_p (rtx x, rtx y)
7730 {
7731   if (x == y || rtx_equal_p (x, y))
7732     return 1;
7733 
7734   if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7735     return 0;
7736 
7737   /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7738      Note that all SUBREGs of MEM are paradoxical; otherwise they
7739      would have been rewritten.  */
7740   if (MEM_P (x) && GET_CODE (y) == SUBREG
7741       && MEM_P (SUBREG_REG (y))
7742       && rtx_equal_p (SUBREG_REG (y),
7743 		      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
7744     return 1;
7745 
7746   if (MEM_P (y) && GET_CODE (x) == SUBREG
7747       && MEM_P (SUBREG_REG (x))
7748       && rtx_equal_p (SUBREG_REG (x),
7749 		      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
7750     return 1;
7751 
7752   /* We used to see if get_last_value of X and Y were the same but that's
7753      not correct.  In one direction, we'll cause the assignment to have
7754      the wrong destination and in the other, we'll import a register into
7755      this insn that might already have been dead.  So fail if none of the
7756      above cases are true.  */
7757   return 0;
7758 }
7759 
7760 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
7761    Return that assignment if so.
7762 
7763    We only handle the most common cases.  */
7764 
7765 static rtx
7766 make_field_assignment (rtx x)
7767 {
7768   rtx dest = SET_DEST (x);
7769   rtx src = SET_SRC (x);
7770   rtx assign;
7771   rtx rhs, lhs;
7772   HOST_WIDE_INT c1;
7773   HOST_WIDE_INT pos;
7774   unsigned HOST_WIDE_INT len;
7775   rtx other;
7776   enum machine_mode mode;
7777 
7778   /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7779      a clear of a one-bit field.  We will have changed it to
7780      (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7781      for a SUBREG.  */
7782 
7783   if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7784       && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7785       && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7786       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7787     {
7788       assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7789 				1, 1, 1, 0);
7790       if (assign != 0)
7791 	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7792       return x;
7793     }
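  /* A small worked example of the pattern above (QImode constants chosen
     for exposition): for POS == 3, (not (ashift (const_int 1) 3)) is
     0b11110111, and so is (rotate (const_int -2) 3), since -2 is
     0b11111110 and rotating it left three places moves the lone zero bit
     into position 3.  */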
7794 
7795   if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7796       && subreg_lowpart_p (XEXP (src, 0))
7797       && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7798 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7799       && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7800       && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7801       && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7802       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7803     {
7804       assign = make_extraction (VOIDmode, dest, 0,
7805 				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7806 				1, 1, 1, 0);
7807       if (assign != 0)
7808 	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7809       return x;
7810     }
7811 
7812   /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7813      one-bit field.  */
7814   if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7815       && XEXP (XEXP (src, 0), 0) == const1_rtx
7816       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7817     {
7818       assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7819 				1, 1, 1, 0);
7820       if (assign != 0)
7821 	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7822       return x;
7823     }
7824 
7825   /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
7826      SRC is an AND with all bits of that field set, then we can discard
7827      the AND.  */
7828   if (GET_CODE (dest) == ZERO_EXTRACT
7829       && GET_CODE (XEXP (dest, 1)) == CONST_INT
7830       && GET_CODE (src) == AND
7831       && GET_CODE (XEXP (src, 1)) == CONST_INT)
7832     {
7833       HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
7834       unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
7835       unsigned HOST_WIDE_INT ze_mask;
7836 
7837       if (width >= HOST_BITS_PER_WIDE_INT)
7838 	ze_mask = -1;
7839       else
7840 	ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
7841 
7842       /* Complete overlap.  We can remove the source AND.  */
7843       if ((and_mask & ze_mask) == ze_mask)
7844 	return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
7845 
7846       /* Partial overlap.  We can reduce the source AND.  */
7847       if ((and_mask & ze_mask) != and_mask)
7848 	{
7849 	  mode = GET_MODE (src);
7850 	  src = gen_rtx_AND (mode, XEXP (src, 0),
7851 			     gen_int_mode (and_mask & ze_mask, mode));
7852 	  return gen_rtx_SET (VOIDmode, dest, src);
7853 	}
7854     }
7855 
7856   /* The other case we handle is assignments into a constant-position
7857      field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7858      a mask that has all one bits except for a group of zero bits and
7859      OTHER is known to have zeros where C1 has ones, this is such an
7860      assignment.  Compute the position and length from C1.  Shift OTHER
7861      to the appropriate position, force it to the required mode, and
7862      make the extraction.  Check for the AND in both operands.  */
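  /* A worked example of such an assignment (a QImode destination is
     assumed for exposition): with C1 == 0xf3, the zero bits of C1 give
     POS == 2 and LEN == 2, and if OTHER can be nonzero only in bits 2
     and 3, then (ior (and DEST 0xf3) OTHER) stores (lshiftrt OTHER 2)
     into a two-bit field at position 2 of DEST.  */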
7863 
7864   if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7865     return x;
7866 
7867   rhs = expand_compound_operation (XEXP (src, 0));
7868   lhs = expand_compound_operation (XEXP (src, 1));
7869 
7870   if (GET_CODE (rhs) == AND
7871       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7872       && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7873     c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7874   else if (GET_CODE (lhs) == AND
7875 	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7876 	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7877     c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7878   else
7879     return x;
7880 
7881   pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7882   if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7883       || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7884       || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7885     return x;
7886 
7887   assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7888   if (assign == 0)
7889     return x;
7890 
7891   /* The mode to use for the source is the mode of the assignment, or of
7892      what is inside a possible STRICT_LOW_PART.  */
7893   mode = (GET_CODE (assign) == STRICT_LOW_PART
7894 	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7895 
7896   /* Shift OTHER right POS places and make it the source, restricting it
7897      to the proper length and mode.  */
7898 
7899   src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
7900 						     GET_MODE (src),
7901 						     other, pos),
7902 			       dest);
7903   src = force_to_mode (src, mode,
7904 		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7905 		       ? ~(unsigned HOST_WIDE_INT) 0
7906 		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7907 		       0);
7908 
7909   /* If SRC is masked by an AND that does not make a difference in
7910      the value being stored, strip it.  */
7911   if (GET_CODE (assign) == ZERO_EXTRACT
7912       && GET_CODE (XEXP (assign, 1)) == CONST_INT
7913       && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
7914       && GET_CODE (src) == AND
7915       && GET_CODE (XEXP (src, 1)) == CONST_INT
7916       && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
7917 	  == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
7918     src = XEXP (src, 0);
7919 
7920   return gen_rtx_SET (VOIDmode, assign, src);
7921 }
7922 
7923 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7924    if so.  */
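/* For instance (an illustrative case using the bitwise forms the combiner
   typically sees): (ior (and A C) (and B C)) is rewritten here as
   (and (ior A B) C), since AND distributes over IOR.  */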
7925 
7926 static rtx
7927 apply_distributive_law (rtx x)
7928 {
7929   enum rtx_code code = GET_CODE (x);
7930   enum rtx_code inner_code;
7931   rtx lhs, rhs, other;
7932   rtx tem;
7933 
7934   /* Distributivity is not true for floating point as it can change the
7935      value.  So we don't do it unless -funsafe-math-optimizations.  */
7936   if (FLOAT_MODE_P (GET_MODE (x))
7937       && ! flag_unsafe_math_optimizations)
7938     return x;
7939 
7940   /* The outer operation can only be one of the following:  */
7941   if (code != IOR && code != AND && code != XOR
7942       && code != PLUS && code != MINUS)
7943     return x;
7944 
7945   lhs = XEXP (x, 0);
7946   rhs = XEXP (x, 1);
7947 
7948   /* If either operand is a primitive we can't do anything, so get out
7949      fast.  */
7950   if (OBJECT_P (lhs) || OBJECT_P (rhs))
7951     return x;
7952 
7953   lhs = expand_compound_operation (lhs);
7954   rhs = expand_compound_operation (rhs);
7955   inner_code = GET_CODE (lhs);
7956   if (inner_code != GET_CODE (rhs))
7957     return x;
7958 
7959   /* See if the inner and outer operations distribute.  */
7960   switch (inner_code)
7961     {
7962     case LSHIFTRT:
7963     case ASHIFTRT:
7964     case AND:
7965     case IOR:
7966       /* These all distribute except over PLUS.  */
7967       if (code == PLUS || code == MINUS)
7968 	return x;
7969       break;
7970 
7971     case MULT:
7972       if (code != PLUS && code != MINUS)
7973 	return x;
7974       break;
7975 
7976     case ASHIFT:
7977       /* This is also a multiply, so it distributes over everything.  */
7978       break;
7979 
7980     case SUBREG:
7981       /* Non-paradoxical SUBREGs distribute over all operations,
7982 	 provided the inner modes and byte offsets are the same, this
7983 	 is an extraction of a low-order part, we don't convert an fp
7984 	 operation to int or vice versa, this is not a vector mode,
7985 	 and we would not be converting a single-word operation into a
7986 	 multi-word operation.  The latter test is not required, but
7987 	 it prevents generating unneeded multi-word operations.  Some
7988 	 of the previous tests are redundant given the latter test,
7989 	 but are retained because they are required for correctness.
7990 
7991 	 We produce the result slightly differently in this case.  */
7992 
7993       if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7994 	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7995 	  || ! subreg_lowpart_p (lhs)
7996 	  || (GET_MODE_CLASS (GET_MODE (lhs))
7997 	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7998 	  || (GET_MODE_SIZE (GET_MODE (lhs))
7999 	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8000 	  || VECTOR_MODE_P (GET_MODE (lhs))
8001 	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
8002 	  /* Result might need to be truncated.  Don't change mode if
8003 	     explicit truncation is needed.  */
8004 	  || !TRULY_NOOP_TRUNCATION
8005 	       (GET_MODE_BITSIZE (GET_MODE (x)),
8006 		GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs)))))
8007 	return x;
8008 
8009       tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8010 				 SUBREG_REG (lhs), SUBREG_REG (rhs));
8011       return gen_lowpart (GET_MODE (x), tem);
8012 
8013     default:
8014       return x;
8015     }
8016 
8017   /* Set LHS and RHS to the inner operands (A and B in the example
8018      above) and set OTHER to the common operand (C in the example).
8019      There is only one way to do this unless the inner operation is
8020      commutative.  */
8021   if (COMMUTATIVE_ARITH_P (lhs)
8022       && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8023     other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8024   else if (COMMUTATIVE_ARITH_P (lhs)
8025 	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8026     other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8027   else if (COMMUTATIVE_ARITH_P (lhs)
8028 	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8029     other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8030   else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8031     other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8032   else
8033     return x;
8034 
8035   /* Form the new inner operation, seeing if it simplifies first.  */
8036   tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
8037 
8038   /* There is one exception to the general way of distributing:
8039      (a | c) ^ (b | c) -> (a ^ b) & ~c  */
8040   if (code == XOR && inner_code == IOR)
8041     {
8042       inner_code = AND;
8043       other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8044     }
8045 
8046   /* We may be able to continue distributing the result, so call
8047      ourselves recursively on the inner operation before forming the
8048      outer operation, which we return.  */
8049   return simplify_gen_binary (inner_code, GET_MODE (x),
8050 			      apply_distributive_law (tem), other);
8051 }
8052 
8053 /* See if X is of the form (* (+ A B) C), and if so convert to
8054    (+ (* A C) (* B C)) and try to simplify.
8055 
8056    Most of the time, this results in no change.  However, if some of
8057    the operands are the same or inverses of each other, simplifications
8058    will result.
8059 
8060    For example, (and (ior A B) (not B)) can occur as the result of
8061    expanding a bit field assignment.  When we apply the distributive
8062    law to this, we get (ior (and (A (not B))) (and (B (not B)))),
8063    which then simplifies to (and (A (not B))).
8064 
8065    Note that no checks happen on the validity of applying the inverse
8066    distributive law.  This is pointless since we can do it in the
8067    few places where this routine is called.
8068 
8069    N is the index of the term that is decomposed (the arithmetic operation,
8070    i.e. (+ A B) in the first example above).  !N is the index of the term that
8071    is distributed, i.e. of C in the first example above.  */
8072 static rtx
8073 distribute_and_simplify_rtx (rtx x, int n)
8074 {
8075   enum machine_mode mode;
8076   enum rtx_code outer_code, inner_code;
8077   rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
8078 
8079   decomposed = XEXP (x, n);
8080   if (!ARITHMETIC_P (decomposed))
8081     return NULL_RTX;
8082 
8083   mode = GET_MODE (x);
8084   outer_code = GET_CODE (x);
8085   distributed = XEXP (x, !n);
8086 
8087   inner_code = GET_CODE (decomposed);
8088   inner_op0 = XEXP (decomposed, 0);
8089   inner_op1 = XEXP (decomposed, 1);
8090 
8091   /* Special case (and (xor B C) (not A)), which is equivalent to
8092      (xor (ior A B) (ior A C))  */
8093   if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
8094     {
8095       distributed = XEXP (distributed, 0);
8096       outer_code = IOR;
8097     }
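  /* A per-bit check of the identity used above: where a bit of A is 1,
     both (ior A B) and (ior A C) have that bit set, so their XOR is 0,
     matching the (not A) mask; where the bit of A is 0, the XOR reduces
     to (xor B C).  */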
8098 
8099   if (n == 0)
8100     {
8101       /* Distribute the second term.  */
8102       new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
8103       new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
8104     }
8105   else
8106     {
8107       /* Distribute the first term.  */
8108       new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
8109       new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
8110     }
8111 
8112   tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
8113 						     new_op0, new_op1));
8114   if (GET_CODE (tmp) != outer_code
8115       && rtx_cost (tmp, SET) < rtx_cost (x, SET))
8116     return tmp;
8117 
8118   return NULL_RTX;
8119 }
8120 
8121 /* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
8122    in MODE.  Return an equivalent form, if different from (and VAROP
8123    (const_int CONSTOP)).  Otherwise, return NULL_RTX.  */
8124 
8125 static rtx
8126 simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
8127 			  unsigned HOST_WIDE_INT constop)
8128 {
8129   unsigned HOST_WIDE_INT nonzero;
8130   unsigned HOST_WIDE_INT orig_constop;
8131   rtx orig_varop;
8132   int i;
8133 
8134   orig_varop = varop;
8135   orig_constop = constop;
8136   if (GET_CODE (varop) == CLOBBER)
8137     return NULL_RTX;
8138 
8139   /* Simplify VAROP knowing that we will be only looking at some of the
8140      bits in it.
8141 
8142      Note by passing in CONSTOP, we guarantee that the bits not set in
8143      CONSTOP are not significant and will never be examined.  We must
8144      ensure that is the case by explicitly masking out those bits
8145      before returning.  */
8146   varop = force_to_mode (varop, mode, constop, 0);
8147 
8148   /* If VAROP is a CLOBBER, we will fail so return it.  */
8149   if (GET_CODE (varop) == CLOBBER)
8150     return varop;
8151 
8152   /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8153      to VAROP and return the new constant.  */
8154   if (GET_CODE (varop) == CONST_INT)
8155     return gen_int_mode (INTVAL (varop) & constop, mode);
8156 
8157   /* See what bits may be nonzero in VAROP.  Unlike the general case of
8158      a call to nonzero_bits, here we don't care about bits outside
8159      MODE.  */
8160 
8161   nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8162 
8163   /* Turn off all bits in the constant that are known to already be zero.
8164      Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8165      which is tested below.  */
8166 
8167   constop &= nonzero;
8168 
8169   /* If we don't have any bits left, return zero.  */
8170   if (constop == 0)
8171     return const0_rtx;
8172 
8173   /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8174      a power of two, we can replace this with an ASHIFT.  */
8175   if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8176       && (i = exact_log2 (constop)) >= 0)
8177     return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
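  /* For example (constants chosen for exposition): if X is known to be
     0 or 1, then (and (neg X) 8) is 0 when X is 0 and 8 when X is 1,
     because NEG of 1 is all ones; that is exactly (ashift X 3).  */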
8178 
8179   /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8180      or XOR, then try to apply the distributive law.  This may eliminate
8181      operations if either branch can be simplified because of the AND.
8182      It may also make some cases more complex, but those cases probably
8183      won't match a pattern either with or without this.  */
8184 
8185   if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8186     return
8187       gen_lowpart
8188 	(mode,
8189 	 apply_distributive_law
8190 	 (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
8191 			       simplify_and_const_int (NULL_RTX,
8192 						       GET_MODE (varop),
8193 						       XEXP (varop, 0),
8194 						       constop),
8195 			       simplify_and_const_int (NULL_RTX,
8196 						       GET_MODE (varop),
8197 						       XEXP (varop, 1),
8198 						       constop))));
8199 
8200   /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8201      the AND and see if one of the operands simplifies to zero.  If so, we
8202      may eliminate it.  */
8203 
8204   if (GET_CODE (varop) == PLUS
8205       && exact_log2 (constop + 1) >= 0)
8206     {
8207       rtx o0, o1;
8208 
8209       o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8210       o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8211       if (o0 == const0_rtx)
8212 	return o1;
8213       if (o1 == const0_rtx)
8214 	return o0;
8215     }
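  /* For example (constants chosen for exposition): with CONSTOP == 15,
     (and (plus X (const_int 16)) 15) reduces to (and X 15), since
     (and 16 15) is zero and only the low four bits of the sum matter.  */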
8216 
8217   /* Make a SUBREG if necessary.  If we can't make it, fail.  */
8218   varop = gen_lowpart (mode, varop);
8219   if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
8220     return NULL_RTX;
8221 
8222   /* If we are only masking insignificant bits, return VAROP.  */
8223   if (constop == nonzero)
8224     return varop;
8225 
8226   if (varop == orig_varop && constop == orig_constop)
8227     return NULL_RTX;
8228 
8229   /* Otherwise, return an AND.  */
8230   return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
8231 }
8232 
8233 
8234 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8235    in MODE.
8236 
8237    Return an equivalent form, if different from X.  Otherwise, return X.  If
8238    X is zero, we are to always construct the equivalent form.  */
8239 
8240 static rtx
8241 simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
8242 			unsigned HOST_WIDE_INT constop)
8243 {
8244   rtx tem = simplify_and_const_int_1 (mode, varop, constop);
8245   if (tem)
8246     return tem;
8247 
8248   if (!x)
8249     x = simplify_gen_binary (AND, GET_MODE (varop), varop,
8250 			     gen_int_mode (constop, mode));
8251   if (GET_MODE (x) != mode)
8252     x = gen_lowpart (mode, x);
8253   return x;
8254 }
8255 
8256 /* Given a REG, X, compute which bits in X can be nonzero.
8257    We don't care about bits outside of those defined in MODE.
8258 
8259    For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
8260    For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
8261 
8262 static rtx
8263 reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode,
8264 			      rtx known_x ATTRIBUTE_UNUSED,
8265 			      enum machine_mode known_mode ATTRIBUTE_UNUSED,
8266 			      unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
8267 			      unsigned HOST_WIDE_INT *nonzero)
8268 {
8269   rtx tem;
8270 
8271   /* If X is a register whose nonzero bits value is current, use it.
8272      Otherwise, if X is a register whose value we can find, use that
8273      value.  Otherwise, use the previously-computed global nonzero bits
8274      for this register.  */
8275 
8276   if (reg_stat[REGNO (x)].last_set_value != 0
8277       && (reg_stat[REGNO (x)].last_set_mode == mode
8278 	  || (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT
8279 	      && GET_MODE_CLASS (mode) == MODE_INT))
8280       && (reg_stat[REGNO (x)].last_set_label == label_tick
8281 	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8282 	      && REG_N_SETS (REGNO (x)) == 1
8283 	      && ! REGNO_REG_SET_P
8284 		 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
8285 		  REGNO (x))))
8286       && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
8287     {
8288       *nonzero &= reg_stat[REGNO (x)].last_set_nonzero_bits;
8289       return NULL;
8290     }
8291 
8292   tem = get_last_value (x);
8293 
8294   if (tem)
8295     {
8296 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8297       /* If X is narrower than MODE and TEM is a non-negative
8298 	 constant that would appear negative in the mode of X,
8299 	 sign-extend it for use in reg_nonzero_bits because some
8300 	 machines (maybe most) will actually do the sign-extension
8301 	 and this is the conservative approach.
8302 
8303 	 ??? For 2.5, try to tighten up the MD files in this regard
8304 	 instead of this kludge.  */
8305 
8306       if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
8307 	  && GET_CODE (tem) == CONST_INT
8308 	  && INTVAL (tem) > 0
8309 	  && 0 != (INTVAL (tem)
8310 		   & ((HOST_WIDE_INT) 1
8311 		      << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8312 	tem = GEN_INT (INTVAL (tem)
8313 		       | ((HOST_WIDE_INT) (-1)
8314 			  << GET_MODE_BITSIZE (GET_MODE (x))));
8315 #endif
8316       return tem;
8317     }
8318   else if (nonzero_sign_valid && reg_stat[REGNO (x)].nonzero_bits)
8319     {
8320       unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits;
8321 
8322       if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
8323 	/* We don't know anything about the upper bits.  */
8324 	mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8325       *nonzero &= mask;
8326     }
8327 
8328   return NULL;
8329 }
8330 
8331 /* Return the number of bits at the high-order end of X that are known to
8332    be equal to the sign bit.  X will be used in mode MODE; if MODE is
8333    VOIDmode, X will be used in its own mode.  The returned value  will always
8334    be between 1 and the number of bits in MODE.  */
8335 
8336 static rtx
8337 reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode,
8338 				     rtx known_x ATTRIBUTE_UNUSED,
8339 				     enum machine_mode known_mode
8340 				     ATTRIBUTE_UNUSED,
8341 				     unsigned int known_ret ATTRIBUTE_UNUSED,
8342 				     unsigned int *result)
8343 {
8344   rtx tem;
8345 
8346   if (reg_stat[REGNO (x)].last_set_value != 0
8347       && reg_stat[REGNO (x)].last_set_mode == mode
8348       && (reg_stat[REGNO (x)].last_set_label == label_tick
8349 	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8350 	      && REG_N_SETS (REGNO (x)) == 1
8351 	      && ! REGNO_REG_SET_P
8352 		 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
8353 		  REGNO (x))))
8354       && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
8355     {
8356       *result = reg_stat[REGNO (x)].last_set_sign_bit_copies;
8357       return NULL;
8358     }
8359 
8360   tem = get_last_value (x);
8361   if (tem != 0)
8362     return tem;
8363 
8364   if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0
8365       && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
8366     *result = reg_stat[REGNO (x)].sign_bit_copies;
8367 
8368   return NULL;
8369 }
8370 
8371 /* Return the number of "extended" bits there are in X, when interpreted
8372    as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
8373    unsigned quantities, this is the number of high-order zero bits.
8374    For signed quantities, this is the number of copies of the sign bit
8375    minus 1.  In both cases, this function returns the number of "spare"
8376    bits.  For example, if two quantities for which this function returns
8377    at least 1 are added, the addition is known not to overflow.
8378 
8379    This function will always return 0 unless called during combine, which
8380    implies that it must be called from a define_split.  */
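/* For example (an SImode value is assumed for exposition): an unsigned
   quantity whose nonzero bits fit in 0xff has 32 - 1 - 7 == 24 "spare"
   high-order zero bits, so adding two such quantities cannot overflow.  */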
8381 
8382 unsigned int
8383 extended_count (rtx x, enum machine_mode mode, int unsignedp)
8384 {
8385   if (nonzero_sign_valid == 0)
8386     return 0;
8387 
8388   return (unsignedp
8389 	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8390 	     ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
8391 			       - floor_log2 (nonzero_bits (x, mode)))
8392 	     : 0)
8393 	  : num_sign_bit_copies (x, mode) - 1);
8394 }
8395 
8396 /* This function is called from `simplify_shift_const' to merge two
8397    outer operations.  Specifically, we have already found that we need
8398    to perform operation *POP0 with constant *PCONST0 at the outermost
8399    position.  We would now like to also perform OP1 with constant CONST1
8400    (with *POP0 being done last).
8401 
8402    Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8403    the resulting operation.  *PCOMP_P is set to 1 if we would need to
8404    complement the innermost operand, otherwise it is unchanged.
8405 
8406    MODE is the mode in which the operation will be done.  No bits outside
8407    the width of this mode matter.  It is assumed that the width of this mode
8408    is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8409 
8410    If *POP0 or OP1 are UNKNOWN, it means no operation is required.  Only NEG, PLUS,
8411    IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
8412    result is simply *PCONST0.
8413 
8414    If the resulting operation cannot be expressed as one operation, we
8415    return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
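/* For instance (an illustrative pairing): an outer IOR of 0xf0 combined
   with an inner IOR of 0x0f collapses into a single IOR of 0xff, while
   two XORs of the same constant cancel and leave no operation at all.  */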
8416 
8417 static int
8418 merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
8419 {
8420   enum rtx_code op0 = *pop0;
8421   HOST_WIDE_INT const0 = *pconst0;
8422 
8423   const0 &= GET_MODE_MASK (mode);
8424   const1 &= GET_MODE_MASK (mode);
8425 
8426   /* If OP0 is an AND, clear unimportant bits in CONST1.  */
8427   if (op0 == AND)
8428     const1 &= const0;
8429 
8430   /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
8431      if OP0 is SET.  */
8432 
8433   if (op1 == UNKNOWN || op0 == SET)
8434     return 1;
8435 
8436   else if (op0 == UNKNOWN)
8437     op0 = op1, const0 = const1;
8438 
8439   else if (op0 == op1)
8440     {
8441       switch (op0)
8442 	{
8443 	case AND:
8444 	  const0 &= const1;
8445 	  break;
8446 	case IOR:
8447 	  const0 |= const1;
8448 	  break;
8449 	case XOR:
8450 	  const0 ^= const1;
8451 	  break;
8452 	case PLUS:
8453 	  const0 += const1;
8454 	  break;
8455 	case NEG:
8456 	  op0 = UNKNOWN;
8457 	  break;
8458 	default:
8459 	  break;
8460 	}
8461     }
8462 
8463   /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
8464   else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8465     return 0;
8466 
8467   /* If the two constants aren't the same, we can't do anything.  The
8468      remaining six cases can all be done.  */
8469   else if (const0 != const1)
8470     return 0;
8471 
8472   else
8473     switch (op0)
8474       {
8475       case IOR:
8476 	if (op1 == AND)
8477 	  /* (a & b) | b == b */
8478 	  op0 = SET;
8479 	else /* op1 == XOR */
8480 	  /* (a ^ b) | b == a | b */
8481 	  {;}
8482 	break;
8483 
8484       case XOR:
8485 	if (op1 == AND)
8486 	  /* (a & b) ^ b == (~a) & b */
8487 	  op0 = AND, *pcomp_p = 1;
8488 	else /* op1 == IOR */
8489 	  /* (a | b) ^ b == a & ~b */
8490 	  op0 = AND, const0 = ~const0;
8491 	break;
8492 
8493       case AND:
8494 	if (op1 == IOR)
8495 	  /* (a | b) & b == b */
8496 	  op0 = SET;
8497 	else /* op1 == XOR */
8498 	  /* (a ^ b) & b == (~a) & b */
8499 	  *pcomp_p = 1;
8500 	break;
8501       default:
8502 	break;
8503       }
8504 
8505   /* Check for NO-OP cases.  */
8506   const0 &= GET_MODE_MASK (mode);
8507   if (const0 == 0
8508       && (op0 == IOR || op0 == XOR || op0 == PLUS))
8509     op0 = UNKNOWN;
8510   else if (const0 == 0 && op0 == AND)
8511     op0 = SET;
8512   else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8513 	   && op0 == AND)
8514     op0 = UNKNOWN;
8515 
8516   /* ??? Slightly redundant with the above mask, but not entirely.
8517      Moving this above means we'd have to sign-extend the mode mask
8518      for the final test.  */
8519   const0 = trunc_int_for_mode (const0, mode);
8520 
8521   *pop0 = op0;
8522   *pconst0 = const0;
8523 
8524   return 1;
8525 }
8526 
8527 /* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
8528    The result of the shift is RESULT_MODE.  Return NULL_RTX if we cannot
8529    simplify it.  Otherwise, return a simplified value.
8530 
8531    The shift is normally computed in the widest mode we find in VAROP, as
8532    long as it isn't a different number of words than RESULT_MODE.  Exceptions
8533    are ASHIFTRT and ROTATE, which are always done in their original mode.  */
8534 
8535 static rtx
8536 simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
8537 			rtx varop, int orig_count)
8538 {
8539   enum rtx_code orig_code = code;
8540   rtx orig_varop = varop;
8541   int count;
8542   enum machine_mode mode = result_mode;
8543   enum machine_mode shift_mode, tmode;
8544   unsigned int mode_words
8545     = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8546   /* We form (outer_op (code varop count) (outer_const)).  */
8547   enum rtx_code outer_op = UNKNOWN;
8548   HOST_WIDE_INT outer_const = 0;
8549   int complement_p = 0;
8550   rtx new, x;
8551 
8552   /* Make sure to truncate the "natural" shift on the way in.  We don't
8553      want to do this inside the loop as it makes it more difficult to
8554      combine shifts.  */
8555   if (SHIFT_COUNT_TRUNCATED)
8556     orig_count &= GET_MODE_BITSIZE (mode) - 1;
8557 
8558   /* If we were given an invalid count, don't do anything except exactly
8559      what was requested.  */
8560 
8561   if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
8562     return NULL_RTX;
8563 
8564   count = orig_count;
8565 
8566   /* Unless one of the branches of the `if' in this loop does a `continue',
8567      we will `break' the loop after the `if'.  */
8568 
8569   while (count != 0)
8570     {
8571       /* If we have an operand of (clobber (const_int 0)), fail.  */
8572       if (GET_CODE (varop) == CLOBBER)
8573 	return NULL_RTX;
8574 
8575       /* If we discovered we had to complement VAROP, leave.  Making a NOT
8576 	 here would cause an infinite loop.  */
8577       if (complement_p)
8578 	break;
8579 
8580       /* Convert ROTATERT to ROTATE.  */
8581       if (code == ROTATERT)
8582 	{
8583 	  unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
8584 	  code = ROTATE;
8585 	  if (VECTOR_MODE_P (result_mode))
8586 	    count = bitsize / GET_MODE_NUNITS (result_mode) - count;
8587 	  else
8588 	    count = bitsize - count;
8589 	}
8590 
8591       /* We need to determine what mode we will do the shift in.  If the
8592 	 shift is a right shift or a ROTATE, we must always do it in the mode
8593 	 it was originally done in.  Otherwise, we can do it in MODE, the
8594 	 widest mode encountered.  */
8595       shift_mode
8596 	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8597 	   ? result_mode : mode);
8598 
8599       /* Handle cases where the count is greater than the size of the mode
8600 	 minus 1.  For ASHIFTRT, use the size minus one as the count (this can
8601 	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
8602 	 take the count modulo the size.  For other shifts, the result is
8603 	 zero.
8604 
8605 	 Since these shifts are being produced by the compiler by combining
8606 	 multiple operations, each of which are defined, we know what the
8607 	 result is supposed to be.  */
8608 
8609       if (count > (GET_MODE_BITSIZE (shift_mode) - 1))
8610 	{
8611 	  if (code == ASHIFTRT)
8612 	    count = GET_MODE_BITSIZE (shift_mode) - 1;
8613 	  else if (code == ROTATE || code == ROTATERT)
8614 	    count %= GET_MODE_BITSIZE (shift_mode);
8615 	  else
8616 	    {
8617 	      /* We can't simply return zero because there may be an
8618 		 outer op.  */
8619 	      varop = const0_rtx;
8620 	      count = 0;
8621 	      break;
8622 	    }
8623 	}
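      /* For example (SImode is assumed for exposition): an out-of-range
	 (ashiftrt:SI X 40) is handled as (ashiftrt:SI X 31),
	 (rotate:SI X 33) as (rotate:SI X 1), and an out-of-range logical
	 shift leaves zero, subject to any outer operation.  */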
8624 
8625       /* An arithmetic right shift of a quantity known to be -1 or 0
8626 	 is a no-op.  */
8627       if (code == ASHIFTRT
8628 	  && (num_sign_bit_copies (varop, shift_mode)
8629 	      == GET_MODE_BITSIZE (shift_mode)))
8630 	{
8631 	  count = 0;
8632 	  break;
8633 	}
8634 
8635       /* If we are doing an arithmetic right shift and discarding all but
8636 	 the sign bit copies, this is equivalent to doing a shift by the
8637 	 bitsize minus one.  Convert it into that shift because it will often
8638 	 allow other simplifications.  */
8639 
8640       if (code == ASHIFTRT
8641 	  && (count + num_sign_bit_copies (varop, shift_mode)
8642 	      >= GET_MODE_BITSIZE (shift_mode)))
8643 	count = GET_MODE_BITSIZE (shift_mode) - 1;
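      /* For example (SImode is assumed for exposition): if VAROP is known
	 to have at least 8 sign-bit copies, (ashiftrt:SI VAROP 25) leaves
	 nothing but copies of the sign bit, so it is treated as a shift
	 by 31.  */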
8644 
8645       /* We simplify the tests below and elsewhere by converting
8646 	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8647 	 `make_compound_operation' will convert it to an ASHIFTRT for
8648 	 those machines (such as VAX) that don't have an LSHIFTRT.  */
8649       if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8650 	  && code == ASHIFTRT
8651 	  && ((nonzero_bits (varop, shift_mode)
8652 	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8653 	      == 0))
8654 	code = LSHIFTRT;
8655 
8656       if (((code == LSHIFTRT
8657 	    && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8658 	    && !(nonzero_bits (varop, shift_mode) >> count))
8659 	   || (code == ASHIFT
8660 	       && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8661 	       && !((nonzero_bits (varop, shift_mode) << count)
8662 		    & GET_MODE_MASK (shift_mode))))
8663 	  && !side_effects_p (varop))
8664 	varop = const0_rtx;
8665 
8666       switch (GET_CODE (varop))
8667 	{
8668 	case SIGN_EXTEND:
8669 	case ZERO_EXTEND:
8670 	case SIGN_EXTRACT:
8671 	case ZERO_EXTRACT:
8672 	  new = expand_compound_operation (varop);
8673 	  if (new != varop)
8674 	    {
8675 	      varop = new;
8676 	      continue;
8677 	    }
8678 	  break;
8679 
8680 	case MEM:
8681 	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8682 	     minus the width of a smaller mode, we can do this with a
8683 	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
8684 	  if ((code == ASHIFTRT || code == LSHIFTRT)
8685 	      && ! mode_dependent_address_p (XEXP (varop, 0))
8686 	      && ! MEM_VOLATILE_P (varop)
8687 	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8688 					 MODE_INT, 1)) != BLKmode)
8689 	    {
8690 	      new = adjust_address_nv (varop, tmode,
8691 				       BYTES_BIG_ENDIAN ? 0
8692 				       : count / BITS_PER_UNIT);
8693 
8694 	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8695 				     : ZERO_EXTEND, mode, new);
8696 	      count = 0;
8697 	      continue;
8698 	    }
8699 	  break;
8700 
8701 	case SUBREG:
8702 	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
8703 	     the same number of words as what we've seen so far.  Then store
8704 	     the widest mode in MODE.  */
8705 	  if (subreg_lowpart_p (varop)
8706 	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8707 		  > GET_MODE_SIZE (GET_MODE (varop)))
8708 	      && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8709 				  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8710 		 == mode_words)
8711 	    {
8712 	      varop = SUBREG_REG (varop);
8713 	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8714 		mode = GET_MODE (varop);
8715 	      continue;
8716 	    }
8717 	  break;
8718 
8719 	case MULT:
8720 	  /* Some machines use MULT instead of ASHIFT because MULT
8721 	     is cheaper.  But it is still better on those machines to
8722 	     merge two shifts into one.  */
8723 	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8724 	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8725 	    {
8726 	      varop
8727 		= simplify_gen_binary (ASHIFT, GET_MODE (varop),
8728 				       XEXP (varop, 0),
8729 				       GEN_INT (exact_log2 (
8730 						INTVAL (XEXP (varop, 1)))));
8731 	      continue;
8732 	    }
8733 	  break;
8734 
8735 	case UDIV:
8736 	  /* Similar, for when divides are cheaper.  */
8737 	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8738 	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8739 	    {
8740 	      varop
8741 		= simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
8742 				       XEXP (varop, 0),
8743 				       GEN_INT (exact_log2 (
8744 						INTVAL (XEXP (varop, 1)))));
8745 	      continue;
8746 	    }
8747 	  break;
8748 
8749 	case ASHIFTRT:
8750 	  /* If we are extracting just the sign bit of an arithmetic
8751 	     right shift, that shift is not needed.  However, the sign
8752 	     bit of a wider mode may be different from what would be
8753 	     interpreted as the sign bit in a narrower mode, so, if
8754 	     the result is narrower, don't discard the shift.  */
8755 	  if (code == LSHIFTRT
8756 	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
8757 	      && (GET_MODE_BITSIZE (result_mode)
8758 		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
8759 	    {
8760 	      varop = XEXP (varop, 0);
8761 	      continue;
8762 	    }
8763 
8764 	  /* ... fall through ...  */
8765 
8766 	case LSHIFTRT:
8767 	case ASHIFT:
8768 	case ROTATE:
8769 	  /* Here we have two nested shifts.  The result is usually the
8770 	     AND of a new shift with a mask.  We compute the result below.  */
8771 	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8772 	      && INTVAL (XEXP (varop, 1)) >= 0
8773 	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8774 	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8775 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8776 	      && !VECTOR_MODE_P (result_mode))
8777 	    {
8778 	      enum rtx_code first_code = GET_CODE (varop);
8779 	      unsigned int first_count = INTVAL (XEXP (varop, 1));
8780 	      unsigned HOST_WIDE_INT mask;
8781 	      rtx mask_rtx;
8782 
8783 	      /* We have one common special case.  We can't do any merging if
8784 		 the inner code is an ASHIFTRT of a smaller mode.  However, if
8785 		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8786 		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8787 		 we can convert it to
8788 		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8789 		 This simplifies certain SIGN_EXTEND operations.  */
8790 	      if (code == ASHIFT && first_code == ASHIFTRT
8791 		  && count == (GET_MODE_BITSIZE (result_mode)
8792 			       - GET_MODE_BITSIZE (GET_MODE (varop))))
8793 		{
8794 		  /* C3 has the low-order C1 bits zero.  */
8795 
8796 		  mask = (GET_MODE_MASK (mode)
8797 			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
8798 
8799 		  varop = simplify_and_const_int (NULL_RTX, result_mode,
8800 						  XEXP (varop, 0), mask);
8801 		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8802 						varop, count);
8803 		  count = first_count;
8804 		  code = ASHIFTRT;
8805 		  continue;
8806 		}
8807 
8808 	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8809 		 than C1 high-order bits equal to the sign bit, we can convert
8810 		 this to either an ASHIFT or an ASHIFTRT depending on the
8811 		 two counts.
8812 
8813 		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
8814 
8815 	      if (code == ASHIFTRT && first_code == ASHIFT
8816 		  && GET_MODE (varop) == shift_mode
8817 		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8818 		      > first_count))
8819 		{
8820 		  varop = XEXP (varop, 0);
8821 		  count -= first_count;
8822 		  if (count < 0)
8823 		    {
8824 		      count = -count;
8825 		      code = ASHIFT;
8826 		    }
8827 
8828 		  continue;
8829 		}
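	      /* Worked example (illustrative, assuming 32-bit SImode and X
		 with at least 6 sign-bit copies): (ashiftrt:SI (ashift:SI X
		 3) 5) becomes (ashiftrt:SI X 2), while (ashiftrt:SI
		 (ashift:SI X 5) 3) becomes (ashift:SI X 2), by combining the
		 two shift counts.  */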
8830 
8831 	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
8832 		 we can only do this if FIRST_CODE is also ASHIFTRT.
8833 
8834 		 We can't do the case when CODE is ROTATE and FIRST_CODE is
8835 		 ASHIFTRT.
8836 
8837 		 If the mode of this shift is not the mode of the outer shift,
8838 		 we can't do this if either shift is a right shift or ROTATE.
8839 
8840 		 Finally, we can't do any of these if the mode is too wide
8841 		 unless the codes are the same.
8842 
8843 		 Handle the case where the shift codes are the same
8844 		 first.  */
8845 
8846 	      if (code == first_code)
8847 		{
8848 		  if (GET_MODE (varop) != result_mode
8849 		      && (code == ASHIFTRT || code == LSHIFTRT
8850 			  || code == ROTATE))
8851 		    break;
8852 
8853 		  count += first_count;
8854 		  varop = XEXP (varop, 0);
8855 		  continue;
8856 		}
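	      /* Worked example (illustrative, assuming 32-bit SImode): when
		 the inner and outer shifts use the same code, the counts
		 simply add, e.g. (lshiftrt:SI (lshiftrt:SI X 2) 3) becomes
		 (lshiftrt:SI X 5).  */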
8857 
8858 	      if (code == ASHIFTRT
8859 		  || (code == ROTATE && first_code == ASHIFTRT)
8860 		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
8861 		  || (GET_MODE (varop) != result_mode
8862 		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
8863 			  || first_code == ROTATE
8864 			  || code == ROTATE)))
8865 		break;
8866 
8867 	      /* To compute the mask to apply after the shift, shift the
8868 		 nonzero bits of the inner shift the same way the
8869 		 outer shift will.  */
8870 
8871 	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
8872 
8873 	      mask_rtx
8874 		= simplify_const_binary_operation (code, result_mode, mask_rtx,
8875 						   GEN_INT (count));
8876 
8877 	      /* Give up if we can't compute an outer operation to use.  */
8878 	      if (mask_rtx == 0
8879 		  || GET_CODE (mask_rtx) != CONST_INT
8880 		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
8881 					INTVAL (mask_rtx),
8882 					result_mode, &complement_p))
8883 		break;
8884 
8885 	      /* If the shifts are in the same direction, we add the
8886 		 counts.  Otherwise, we subtract them.  */
8887 	      if ((code == ASHIFTRT || code == LSHIFTRT)
8888 		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8889 		count += first_count;
8890 	      else
8891 		count -= first_count;
8892 
8893 	      /* If COUNT is positive, the new shift is usually CODE,
8894 		 except for the two exceptions below, in which case it is
8895 		 FIRST_CODE.  If the count is negative, FIRST_CODE should
8896 		 always be used.  */
8897 	      if (count > 0
8898 		  && ((first_code == ROTATE && code == ASHIFT)
8899 		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
8900 		code = first_code;
8901 	      else if (count < 0)
8902 		code = first_code, count = -count;
8903 
8904 	      varop = XEXP (varop, 0);
8905 	      continue;
8906 	    }
8907 
8908 	  /* If we have (A << B << C) for any shift, we can convert this to
8909 	     (A << C << B).  This wins if A is a constant.  Only try this if
8910 	     B is not a constant.  */
8911 
8912 	  else if (GET_CODE (varop) == code
8913 		   && GET_CODE (XEXP (varop, 0)) == CONST_INT
8914 		   && GET_CODE (XEXP (varop, 1)) != CONST_INT)
8915 	    {
8916 	      rtx new = simplify_const_binary_operation (code, mode,
8917 							 XEXP (varop, 0),
8918 							 GEN_INT (count));
8919 	      varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
8920 	      count = 0;
8921 	      continue;
8922 	    }
8923 	  break;
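	  /* Worked example (illustrative, assuming 32-bit SImode): with a
	     constant innermost operand, (ashift:SI (ashift:SI (const_int 3)
	     B) 2) is rewritten as (ashift:SI (const_int 12) B), folding the
	     constant shift (3 << 2 == 12) into the inner operand.  */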
8924 
8925 	case NOT:
8926 	  /* Make this fit the case below.  */
8927 	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
8928 			       GEN_INT (GET_MODE_MASK (mode)));
8929 	  continue;
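	  /* Worked example (illustrative): in QImode, (not:QI X) is rewritten
	     as (xor:QI X (const_int 255)) so that the IOR/AND/XOR handling
	     below applies.  */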
8930 
8931 	case IOR:
8932 	case AND:
8933 	case XOR:
8934 	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8935 	     with C the size of VAROP - 1 and the shift is logical if
8936 	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8937 	     we have an (le X 0) operation.   If we have an arithmetic shift
8938 	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
8939 	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
8940 
8941 	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8942 	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8943 	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8944 	      && (code == LSHIFTRT || code == ASHIFTRT)
8945 	      && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
8946 	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8947 	    {
8948 	      count = 0;
8949 	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
8950 				  const0_rtx);
8951 
8952 	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8953 		varop = gen_rtx_NEG (GET_MODE (varop), varop);
8954 
8955 	      continue;
8956 	    }
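	  /* Worked example (illustrative, assuming 32-bit SImode and
	     STORE_FLAG_VALUE == 1): (lshiftrt:SI (ior:SI (plus:SI X
	     (const_int -1)) X) 31) extracts the sign bit of (X - 1) | X,
	     which is set exactly when X <= 0, so it becomes
	     (le:SI X (const_int 0)).  */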
8957 
8958 	  /* If we have (shift (logical)), move the logical to the outside
8959 	     to allow it to possibly combine with another logical and the
8960 	     shift to combine with another shift.  This also canonicalizes to
8961 	     what a ZERO_EXTRACT looks like.  Also, some machines have
8962 	     (and (shift)) insns.  */
8963 
8964 	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8965 	      /* We can't do this if we have (ashiftrt (xor))  and the
8966 		 constant has its sign bit set in shift_mode.  */
8967 	      && !(code == ASHIFTRT && GET_CODE (varop) == XOR
8968 		   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
8969 					      shift_mode))
8970 	      && (new = simplify_const_binary_operation (code, result_mode,
8971 							 XEXP (varop, 1),
8972 							 GEN_INT (count))) != 0
8973 	      && GET_CODE (new) == CONST_INT
8974 	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8975 				  INTVAL (new), result_mode, &complement_p))
8976 	    {
8977 	      varop = XEXP (varop, 0);
8978 	      continue;
8979 	    }
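	  /* Worked example (illustrative, assuming 32-bit SImode):
	     (lshiftrt:SI (and:SI X (const_int 240)) 4) becomes the new shift
	     (lshiftrt:SI X 4) with an outer (and ... (const_int 15)), since
	     (X & 0xf0) >> 4 == (X >> 4) & 0x0f.  */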
8980 
8981 	  /* If we can't do that, try to simplify the shift in each arm of the
8982 	     logical expression, make a new logical expression, and apply
8983 	     the inverse distributive law.  This also can't be done
8984 	     for some (ashiftrt (xor)).  */
8985 	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8986 	     && !(code == ASHIFTRT && GET_CODE (varop) == XOR
8987 		  && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
8988 					     shift_mode)))
8989 	    {
8990 	      rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8991 					      XEXP (varop, 0), count);
8992 	      rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8993 					      XEXP (varop, 1), count);
8994 
8995 	      varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
8996 					   lhs, rhs);
8997 	      varop = apply_distributive_law (varop);
8998 
8999 	      count = 0;
9000 	      continue;
9001 	    }
9002 	  break;
9003 
9004 	case EQ:
9005 	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9006 	     says that the sign bit can be tested, FOO has mode MODE, C is
9007 	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9008 	     that may be nonzero.  */
9009 	  if (code == LSHIFTRT
9010 	      && XEXP (varop, 1) == const0_rtx
9011 	      && GET_MODE (XEXP (varop, 0)) == result_mode
9012 	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
9013 	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9014 	      && STORE_FLAG_VALUE == -1
9015 	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9016 	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9017 				  (HOST_WIDE_INT) 1, result_mode,
9018 				  &complement_p))
9019 	    {
9020 	      varop = XEXP (varop, 0);
9021 	      count = 0;
9022 	      continue;
9023 	    }
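	  /* Worked example (illustrative, assuming 32-bit SImode and
	     STORE_FLAG_VALUE == -1): if FOO is known to be 0 or 1,
	     (lshiftrt:SI (eq:SI FOO (const_int 0)) 31) yields 1 when
	     FOO == 0 and 0 when FOO == 1, i.e. (xor:SI FOO (const_int 1)).  */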
9024 	  break;
9025 
9026 	case NEG:
9027 	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9028 	     than the number of bits in the mode is equivalent to A.  */
9029 	  if (code == LSHIFTRT
9030 	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
9031 	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9032 	    {
9033 	      varop = XEXP (varop, 0);
9034 	      count = 0;
9035 	      continue;
9036 	    }
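	  /* Worked example (illustrative, assuming 32-bit SImode): if A is
	     known to be 0 or 1, (lshiftrt:SI (neg:SI A) 31) is 0 when A == 0
	     and 1 when A == 1, so it is simply A.  */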
9037 
9038 	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
9039 	     NEG outside to allow shifts to combine.  */
9040 	  if (code == ASHIFT
9041 	      && merge_outer_ops (&outer_op, &outer_const, NEG,
9042 				  (HOST_WIDE_INT) 0, result_mode,
9043 				  &complement_p))
9044 	    {
9045 	      varop = XEXP (varop, 0);
9046 	      continue;
9047 	    }
9048 	  break;
9049 
9050 	case PLUS:
9051 	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9052 	     is one less than the number of bits in the mode is
9053 	     equivalent to (xor A 1).  */
9054 	  if (code == LSHIFTRT
9055 	      && count == (GET_MODE_BITSIZE (result_mode) - 1)
9056 	      && XEXP (varop, 1) == constm1_rtx
9057 	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9058 	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9059 				  (HOST_WIDE_INT) 1, result_mode,
9060 				  &complement_p))
9061 	    {
9062 	      count = 0;
9063 	      varop = XEXP (varop, 0);
9064 	      continue;
9065 	    }
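	  /* Worked example (illustrative, assuming 32-bit SImode): if A is
	     known to be 0 or 1, (lshiftrt:SI (plus:SI A (const_int -1)) 31)
	     is 1 when A == 0 and 0 when A == 1, i.e. the same as
	     (xor:SI A (const_int 1)), which is moved to the outer
	     operation.  */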
9066 
9067 	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9068 	     that might be nonzero in BAR are those being shifted out and those
9069 	     bits are known zero in FOO, we can replace the PLUS with FOO.
9070 	     Similarly in the other operand order.  This code occurs when
9071 	     we are computing the size of a variable-size array.  */
9072 
9073 	  if ((code == ASHIFTRT || code == LSHIFTRT)
9074 	      && count < HOST_BITS_PER_WIDE_INT
9075 	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9076 	      && (nonzero_bits (XEXP (varop, 1), result_mode)
9077 		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9078 	    {
9079 	      varop = XEXP (varop, 0);
9080 	      continue;
9081 	    }
9082 	  else if ((code == ASHIFTRT || code == LSHIFTRT)
9083 		   && count < HOST_BITS_PER_WIDE_INT
9084 		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9085 		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9086 			    >> count)
9087 		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9088 			    & nonzero_bits (XEXP (varop, 1),
9089 						 result_mode)))
9090 	    {
9091 	      varop = XEXP (varop, 1);
9092 	      continue;
9093 	    }
9094 
9095 	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9096 	  if (code == ASHIFT
9097 	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
9098 	      && (new = simplify_const_binary_operation (ASHIFT, result_mode,
9099 							 XEXP (varop, 1),
9100 							 GEN_INT (count))) != 0
9101 	      && GET_CODE (new) == CONST_INT
9102 	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
9103 				  INTVAL (new), result_mode, &complement_p))
9104 	    {
9105 	      varop = XEXP (varop, 0);
9106 	      continue;
9107 	    }
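	  /* Worked example (illustrative, assuming 32-bit SImode):
	     (ashift:SI (plus:SI X (const_int 3)) 2) becomes (ashift:SI X 2)
	     with an outer (plus ... (const_int 12)), since
	     (X + 3) << 2 == (X << 2) + 12.  */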
9108 
9109 	  /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9110 	     signbit', and attempt to change the PLUS to an XOR and move it to
9111 	     the outer operation as is done above in the AND/IOR/XOR case
9112 	     leg for shift(logical).  See the logical handling above for the
9113 	     reasoning behind doing so.  */
9114 	  if (code == LSHIFTRT
9115 	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
9116 	      && mode_signbit_p (result_mode, XEXP (varop, 1))
9117 	      && (new = simplify_const_binary_operation (code, result_mode,
9118 							 XEXP (varop, 1),
9119 							 GEN_INT (count))) != 0
9120 	      && GET_CODE (new) == CONST_INT
9121 	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9122 				  INTVAL (new), result_mode, &complement_p))
9123 	    {
9124 	      varop = XEXP (varop, 0);
9125 	      continue;
9126 	    }
9127 
9128 	  break;
9129 
9130 	case MINUS:
9131 	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9132 	     with C the size of VAROP - 1 and the shift is logical if
9133 	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9134 	     we have a (gt X 0) operation.  If the shift is arithmetic with
9135 	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9136 	     we have a (neg (gt X 0)) operation.  */
9137 
9138 	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9139 	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9140 	      && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9141 	      && (code == LSHIFTRT || code == ASHIFTRT)
9142 	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9143 	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9144 	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9145 	    {
9146 	      count = 0;
9147 	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9148 				  const0_rtx);
9149 
9150 	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9151 		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9152 
9153 	      continue;
9154 	    }
9155 	  break;
9156 
9157 	case TRUNCATE:
9158 	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9159 	     if the truncate does not affect the value.  */
9160 	  if (code == LSHIFTRT
9161 	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9162 	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9163 	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
9164 		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9165 		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9166 	    {
9167 	      rtx varop_inner = XEXP (varop, 0);
9168 
9169 	      varop_inner
9170 		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9171 				    XEXP (varop_inner, 0),
9172 				    GEN_INT
9173 				    (count + INTVAL (XEXP (varop_inner, 1))));
9174 	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9175 	      count = 0;
9176 	      continue;
9177 	    }
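	  /* Worked example (illustrative, assuming a SImode inner shift and a
	     QImode truncate): (lshiftrt:QI (truncate:QI (lshiftrt:SI X 24))
	     2) becomes (truncate:QI (lshiftrt:SI X 26)); both forms leave
	     bits 26..31 of X in the low bits of the QImode result.  */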
9178 	  break;
9179 
9180 	default:
9181 	  break;
9182 	}
9183 
9184       break;
9185     }
9186 
9187   /* We need to determine what mode to do the shift in.  If the shift is
9188      a right shift or ROTATE, we must always do it in the mode it was
9189      originally done in.  Otherwise, we can do it in MODE, the widest mode
9190      encountered.  The code we care about is that of the shift that will
9191      actually be done, not the shift that was originally requested.  */
9192   shift_mode
9193     = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9194        ? result_mode : mode);
9195 
9196   /* We have now finished analyzing the shift.  The result should be
9197      a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9198      OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
9199      to the result of the shift.  OUTER_CONST is the relevant constant,
9200      but we must turn off all bits turned off in the shift.  */
9201 
9202   if (outer_op == UNKNOWN
9203       && orig_code == code && orig_count == count
9204       && varop == orig_varop
9205       && shift_mode == GET_MODE (varop))
9206     return NULL_RTX;
9207 
9208   /* Make a SUBREG if necessary.  If we can't make it, fail.  */
9209   varop = gen_lowpart (shift_mode, varop);
9210   if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
9211     return NULL_RTX;
9212 
9213   /* If we have an outer operation and we just made a shift, it is
9214      possible that we could have simplified the shift were it not
9215      for the outer operation.  So try to do the simplification
9216      recursively.  */
9217 
9218   if (outer_op != UNKNOWN)
9219     x = simplify_shift_const_1 (code, shift_mode, varop, count);
9220   else
9221     x = NULL_RTX;
9222 
9223   if (x == NULL_RTX)
9224     x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));
9225 
9226   /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9227      turn off all the bits that the shift would have turned off.  */
9228   if (orig_code == LSHIFTRT && result_mode != shift_mode)
9229     x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9230 				GET_MODE_MASK (result_mode) >> orig_count);
9231 
9232   /* Do the remainder of the processing in RESULT_MODE.  */
9233   x = gen_lowpart_or_truncate (result_mode, x);
9234 
9235   /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9236      operation.  */
9237   if (complement_p)
9238     x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9239 
9240   if (outer_op != UNKNOWN)
9241     {
9242       if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9243 	outer_const = trunc_int_for_mode (outer_const, result_mode);
9244 
9245       if (outer_op == AND)
9246 	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9247       else if (outer_op == SET)
9248 	{
9249 	  /* This means that we have determined that the result is
9250 	     equivalent to a constant.  This should be rare.  */
9251 	  if (!side_effects_p (x))
9252 	    x = GEN_INT (outer_const);
9253 	}
9254       else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
9255 	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9256       else
9257 	x = simplify_gen_binary (outer_op, result_mode, x,
9258 				 GEN_INT (outer_const));
9259     }
9260 
9261   return x;
9262 }
9263 
9264 /* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
9265    The result of the shift is RESULT_MODE.  If we cannot simplify it,
9266    return X or, if it is NULL, synthesize the expression with
9267    simplify_gen_binary.  Otherwise, return a simplified value.
9268 
9269    The shift is normally computed in the widest mode we find in VAROP, as
9270    long as it isn't a different number of words than RESULT_MODE.  Exceptions
9271    are ASHIFTRT and ROTATE, which are always done in their original mode.  */
9272 
9273 static rtx
9274 simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
9275 		      rtx varop, int count)
9276 {
9277   rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
9278   if (tem)
9279     return tem;
9280 
9281   if (!x)
9282     x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
9283   if (GET_MODE (x) != result_mode)
9284     x = gen_lowpart (result_mode, x);
9285   return x;
9286 }
9287 
9288 
9289 /* Like recog, but we receive the address of a pointer to a new pattern.
9290    We try to match the rtx that the pointer points to.
9291    If that fails, we may try to modify or replace the pattern,
9292    storing the replacement into the same pointer object.
9293 
9294    Modifications include deletion or addition of CLOBBERs.
9295 
9296    PNOTES is a pointer to a location where any REG_UNUSED notes added for
9297    the CLOBBERs are placed.
9298 
9299    The value is the final insn code from the pattern ultimately matched,
9300    or -1.  */
9301 
9302 static int
9303 recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
9304 {
9305   rtx pat = *pnewpat;
9306   int insn_code_number;
9307   int num_clobbers_to_add = 0;
9308   int i;
9309   rtx notes = 0;
9310   rtx old_notes, old_pat;
9311 
9312   /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9313      we use to indicate that something didn't match.  If we find such a
9314      thing, force rejection.  */
9315   if (GET_CODE (pat) == PARALLEL)
9316     for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9317       if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9318 	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9319 	return -1;
9320 
9321   old_pat = PATTERN (insn);
9322   old_notes = REG_NOTES (insn);
9323   PATTERN (insn) = pat;
9324   REG_NOTES (insn) = 0;
9325 
9326   insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9327 
9328   /* If the insn wasn't recognized, we may previously have had an insn
9329      that clobbered some register as a side effect, but the combined
9330      insn doesn't need to do that.  So try once more without the clobbers
9331      unless this represents an ASM insn.  */
9332 
9333   if (insn_code_number < 0 && ! check_asm_operands (pat)
9334       && GET_CODE (pat) == PARALLEL)
9335     {
9336       int pos;
9337 
9338       for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9339 	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9340 	  {
9341 	    if (i != pos)
9342 	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9343 	    pos++;
9344 	  }
9345 
9346       SUBST_INT (XVECLEN (pat, 0), pos);
9347 
9348       if (pos == 1)
9349 	pat = XVECEXP (pat, 0, 0);
9350 
9351       PATTERN (insn) = pat;
9352       insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9353     }
9354   PATTERN (insn) = old_pat;
9355   REG_NOTES (insn) = old_notes;
9356 
9357   /* Recognize all noop sets; these will be killed by a follow-up pass.  */
9358   if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9359     insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9360 
9361   /* If we had any clobbers to add, make a new pattern that contains
9362      them.  Then check to make sure that all of them are dead.  */
9363   if (num_clobbers_to_add)
9364     {
9365       rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9366 				     rtvec_alloc (GET_CODE (pat) == PARALLEL
9367 						  ? (XVECLEN (pat, 0)
9368 						     + num_clobbers_to_add)
9369 						  : num_clobbers_to_add + 1));
9370 
9371       if (GET_CODE (pat) == PARALLEL)
9372 	for (i = 0; i < XVECLEN (pat, 0); i++)
9373 	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9374       else
9375 	XVECEXP (newpat, 0, 0) = pat;
9376 
9377       add_clobbers (newpat, insn_code_number);
9378 
9379       for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9380 	   i < XVECLEN (newpat, 0); i++)
9381 	{
9382 	  if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
9383 	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9384 	    return -1;
9385 	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9386 				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
9387 	}
9388       pat = newpat;
9389     }
9390 
9391   *pnewpat = pat;
9392   *pnotes = notes;
9393 
9394   return insn_code_number;
9395 }
9396 
9397 /* Like gen_lowpart_general but for use by combine.  In combine it
9398    is not possible to create any new pseudoregs.  However, it is
9399    safe to create invalid memory addresses, because combine will
9400    try to recognize them and all they will do is make the combine
9401    attempt fail.
9402 
9403    If for some reason this cannot do its job, an rtx
9404    (clobber (const_int 0)) is returned.
9405    An insn containing that will not be recognized.  */
9406 
9407 static rtx
9408 gen_lowpart_for_combine (enum machine_mode omode, rtx x)
9409 {
9410   enum machine_mode imode = GET_MODE (x);
9411   unsigned int osize = GET_MODE_SIZE (omode);
9412   unsigned int isize = GET_MODE_SIZE (imode);
9413   rtx result;
9414 
9415   if (omode == imode)
9416     return x;
9417 
9418   /* Return identity if this is a CONST or symbolic reference.  */
9419   if (omode == Pmode
9420       && (GET_CODE (x) == CONST
9421 	  || GET_CODE (x) == SYMBOL_REF
9422 	  || GET_CODE (x) == LABEL_REF))
9423     return x;
9424 
9425   /* We can only support MODE being wider than a word if X is a
9426      constant integer or has a mode the same size.  */
9427   if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
9428       && ! ((imode == VOIDmode
9429 	     && (GET_CODE (x) == CONST_INT
9430 		 || GET_CODE (x) == CONST_DOUBLE))
9431 	    || isize == osize))
9432     goto fail;
9433 
9434   /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
9435      won't know what to do.  So we will strip off the SUBREG here and
9436      process normally.  */
9437   if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
9438     {
9439       x = SUBREG_REG (x);
9440 
9441       /* In case we fall through to the address adjustments further
9442 	 below, we need to update the known mode and size of x (imode
9443 	 and isize), since we just changed x.  */
9444       imode = GET_MODE (x);
9445 
9446       if (imode == omode)
9447 	return x;
9448 
9449       isize = GET_MODE_SIZE (imode);
9450     }
9451 
9452   result = gen_lowpart_common (omode, x);
9453 
9454 #ifdef CANNOT_CHANGE_MODE_CLASS
9455   if (result != 0 && GET_CODE (result) == SUBREG)
9456     record_subregs_of_mode (result);
9457 #endif
9458 
9459   if (result)
9460     return result;
9461 
9462   if (MEM_P (x))
9463     {
9464       int offset = 0;
9465 
9466       /* Refuse to work on a volatile memory ref or one with a mode-dependent
9467 	 address.  */
9468       if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9469 	goto fail;
9470 
9471       /* If we want to refer to something bigger than the original memref,
9472 	 generate a paradoxical subreg instead.  That will force a reload
9473 	 of the original memref X.  */
9474       if (isize < osize)
9475 	return gen_rtx_SUBREG (omode, x, 0);
9476 
9477       if (WORDS_BIG_ENDIAN)
9478 	offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
9479 
9480       /* Adjust the address so that the address-after-the-data is
9481 	 unchanged.  */
9482       if (BYTES_BIG_ENDIAN)
9483 	offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
9484 
9485       return adjust_address_nv (x, omode, offset);
9486     }
9487 
9488   /* If X is a comparison operator, rewrite it in a new mode.  This
9489      probably won't match, but may allow further simplifications.  */
9490   else if (COMPARISON_P (x))
9491     return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
9492 
9493   /* If we couldn't simplify X any other way, just enclose it in a
9494      SUBREG.  Normally, this SUBREG won't match, but some patterns may
9495      include an explicit SUBREG or we may simplify it further in combine.  */
9496   else
9497     {
9498       int offset = 0;
9499       rtx res;
9500 
9501       offset = subreg_lowpart_offset (omode, imode);
9502       if (imode == VOIDmode)
9503 	{
9504 	  imode = int_mode_for_mode (omode);
9505 	  x = gen_lowpart_common (imode, x);
9506 	  if (x == NULL)
9507 	    goto fail;
9508 	}
9509       res = simplify_gen_subreg (omode, x, imode, offset);
9510       if (res)
9511 	return res;
9512     }
9513 
9514  fail:
9515   return gen_rtx_CLOBBER (imode, const0_rtx);
9516 }
9517 
9518 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
9519    comparison code that will be tested.
9520 
9521    The result is a possibly different comparison code to use.  *POP0 and
9522    *POP1 may be updated.
9523 
9524    It is possible that we might detect that a comparison is either always
9525    true or always false.  However, we do not perform general constant
9526    folding in combine, so this knowledge isn't useful.  Such tautologies
9527    should have been detected earlier.  Hence we ignore all such cases.  */
9528 
9529 static enum rtx_code
9530 simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
9531 {
9532   rtx op0 = *pop0;
9533   rtx op1 = *pop1;
9534   rtx tem, tem1;
9535   int i;
9536   enum machine_mode mode, tmode;
9537 
9538   /* Try a few ways of applying the same transformation to both operands.  */
9539   while (1)
9540     {
9541 #ifndef WORD_REGISTER_OPERATIONS
9542       /* The test below this one won't handle SIGN_EXTENDs on these machines,
9543 	 so check specially.  */
9544       if (code != GTU && code != GEU && code != LTU && code != LEU
9545 	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9546 	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
9547 	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
9548 	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9549 	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9550 	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9551 	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9552 	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9553 	  && XEXP (op0, 1) == XEXP (op1, 1)
9554 	  && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9555 	  && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
9556 	  && (INTVAL (XEXP (op0, 1))
9557 	      == (GET_MODE_BITSIZE (GET_MODE (op0))
9558 		  - (GET_MODE_BITSIZE
9559 		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9560 	{
9561 	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9562 	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9563 	}
9564 #endif
9565 
9566       /* If both operands are the same constant shift, see if we can ignore the
9567 	 shift.  We can if the shift is a rotate or if the bits shifted out of
9568 	 this shift are known to be zero for both inputs and if the type of
9569 	 comparison is compatible with the shift.  */
9570       if (GET_CODE (op0) == GET_CODE (op1)
9571 	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9572 	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9573 	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9574 		  && (code != GT && code != LT && code != GE && code != LE))
9575 	      || (GET_CODE (op0) == ASHIFTRT
9576 		  && (code != GTU && code != LTU
9577 		      && code != GEU && code != LEU)))
9578 	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9579 	  && INTVAL (XEXP (op0, 1)) >= 0
9580 	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9581 	  && XEXP (op0, 1) == XEXP (op1, 1))
9582 	{
9583 	  enum machine_mode mode = GET_MODE (op0);
9584 	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9585 	  int shift_count = INTVAL (XEXP (op0, 1));
9586 
9587 	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9588 	    mask &= (mask >> shift_count) << shift_count;
9589 	  else if (GET_CODE (op0) == ASHIFT)
9590 	    mask = (mask & (mask << shift_count)) >> shift_count;
9591 
9592 	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
9593 	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
9594 	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9595 	  else
9596 	    break;
9597 	}
9598 
9599       /* If both operands are AND's of a paradoxical SUBREG by constant, the
9600 	 SUBREGs are of the same mode, and, in both cases, the AND would
9601 	 be redundant if the comparison was done in the narrower mode,
9602 	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9603 	 and the operand's possibly nonzero bits are 0xffffff01; in that case
9604 	 if we only care about QImode, we don't need the AND).  This case
9605 	 occurs if the output mode of an scc insn is not SImode and
9606 	 STORE_FLAG_VALUE == 1 (e.g., the 386).
9607 
9608 	 Similarly, check for a case where the AND's are ZERO_EXTEND
9609 	 operations from some narrower mode even though a SUBREG is not
9610 	 present.  */
9611 
9612       else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9613 	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
9614 	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
9615 	{
9616 	  rtx inner_op0 = XEXP (op0, 0);
9617 	  rtx inner_op1 = XEXP (op1, 0);
9618 	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9619 	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9620 	  int changed = 0;
9621 
9622 	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9623 	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
9624 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9625 	      && (GET_MODE (SUBREG_REG (inner_op0))
9626 		  == GET_MODE (SUBREG_REG (inner_op1)))
9627 	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
9628 		  <= HOST_BITS_PER_WIDE_INT)
9629 	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9630 					     GET_MODE (SUBREG_REG (inner_op0)))))
9631 	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9632 					     GET_MODE (SUBREG_REG (inner_op1))))))
9633 	    {
9634 	      op0 = SUBREG_REG (inner_op0);
9635 	      op1 = SUBREG_REG (inner_op1);
9636 
9637 	      /* The resulting comparison is always unsigned since we masked
9638 		 off the original sign bit.  */
9639 	      code = unsigned_condition (code);
9640 
9641 	      changed = 1;
9642 	    }
9643 
9644 	  else if (c0 == c1)
9645 	    for (tmode = GET_CLASS_NARROWEST_MODE
9646 		 (GET_MODE_CLASS (GET_MODE (op0)));
9647 		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9648 	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
9649 		{
9650 		  op0 = gen_lowpart (tmode, inner_op0);
9651 		  op1 = gen_lowpart (tmode, inner_op1);
9652 		  code = unsigned_condition (code);
9653 		  changed = 1;
9654 		  break;
9655 		}
9656 
9657 	  if (! changed)
9658 	    break;
9659 	}
9660 
9661       /* If both operands are NOT, we can strip off the outer operation
9662 	 and adjust the comparison code for swapped operands; similarly for
9663 	 NEG, except that this must be an equality comparison.  */
9664       else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9665 	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9666 		   && (code == EQ || code == NE)))
9667 	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
9668 
9669       else
9670 	break;
9671     }
9672 
9673   /* If the first operand is a constant, swap the operands and adjust the
9674      comparison code appropriately, but don't do this if the second operand
9675      is already a constant integer.  */
9676   if (swap_commutative_operands_p (op0, op1))
9677     {
9678       tem = op0, op0 = op1, op1 = tem;
9679       code = swap_condition (code);
9680     }
9681 
9682   /* We now enter a loop during which we will try to simplify the comparison.
9683      For the most part, we only are concerned with comparisons with zero,
9684      but some things may really be comparisons with zero but not start
9685      out looking that way.  */
9686 
9687   while (GET_CODE (op1) == CONST_INT)
9688     {
9689       enum machine_mode mode = GET_MODE (op0);
9690       unsigned int mode_width = GET_MODE_BITSIZE (mode);
9691       unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9692       int equality_comparison_p;
9693       int sign_bit_comparison_p;
9694       int unsigned_comparison_p;
9695       HOST_WIDE_INT const_op;
9696 
9697       /* We only want to handle integral modes.  This catches VOIDmode,
9698 	 CCmode, and the floating-point modes.  An exception is that we
9699 	 can handle VOIDmode if OP0 is a COMPARE or a comparison
9700 	 operation.  */
9701 
9702       if (GET_MODE_CLASS (mode) != MODE_INT
9703 	  && ! (mode == VOIDmode
9704 		&& (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
9705 	break;
9706 
9707       /* Get the constant we are comparing against and turn off all bits
9708 	 not on in our mode.  */
9709       const_op = INTVAL (op1);
9710       if (mode != VOIDmode)
9711 	const_op = trunc_int_for_mode (const_op, mode);
9712       op1 = GEN_INT (const_op);
9713 
9714       /* If we are comparing against a constant power of two and the value
9715 	 being compared can only have that single bit nonzero (e.g., it was
9716 	 `and'ed with that bit), we can replace this with a comparison
9717 	 with zero.  */
9718       if (const_op
9719 	  && (code == EQ || code == NE || code == GE || code == GEU
9720 	      || code == LT || code == LTU)
9721 	  && mode_width <= HOST_BITS_PER_WIDE_INT
9722 	  && exact_log2 (const_op) >= 0
9723 	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
9724 	{
9725 	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9726 	  op1 = const0_rtx, const_op = 0;
9727 	}
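      /* Worked example (illustrative): if OP0 is (and:SI X (const_int 4)),
	 its only possible nonzero bit is bit 2, so
	 (eq OP0 (const_int 4)) is replaced by (ne OP0 (const_int 0)).  */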
9728 
9729       /* Similarly, if we are comparing a value known to be either -1 or
9730 	 0 with -1, change it to the opposite comparison against zero.  */
9731 
9732       if (const_op == -1
9733 	  && (code == EQ || code == NE || code == GT || code == LE
9734 	      || code == GEU || code == LTU)
9735 	  && num_sign_bit_copies (op0, mode) == mode_width)
9736 	{
9737 	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9738 	  op1 = const0_rtx, const_op = 0;
9739 	}
9740 
9741       /* Do some canonicalizations based on the comparison code.  We prefer
9742 	 comparisons against zero and then prefer equality comparisons.
9743 	 If we can reduce the size of a constant, we will do that too.  */
9744 
9745       switch (code)
9746 	{
9747 	case LT:
9748 	  /* < C is equivalent to <= (C - 1) */
9749 	  if (const_op > 0)
9750 	    {
9751 	      const_op -= 1;
9752 	      op1 = GEN_INT (const_op);
9753 	      code = LE;
9754 	      /* ... fall through to LE case below.  */
9755 	    }
9756 	  else
9757 	    break;
9758 
9759 	case LE:
9760 	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
9761 	  if (const_op < 0)
9762 	    {
9763 	      const_op += 1;
9764 	      op1 = GEN_INT (const_op);
9765 	      code = LT;
9766 	    }
9767 
9768 	  /* If we are doing a <= 0 comparison on a value known to have
9769 	     a zero sign bit, we can replace this with == 0.  */
9770 	  else if (const_op == 0
9771 		   && mode_width <= HOST_BITS_PER_WIDE_INT
9772 		   && (nonzero_bits (op0, mode)
9773 		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9774 	    code = EQ;
9775 	  break;
9776 
9777 	case GE:
9778 	  /* >= C is equivalent to > (C - 1).  */
9779 	  if (const_op > 0)
9780 	    {
9781 	      const_op -= 1;
9782 	      op1 = GEN_INT (const_op);
9783 	      code = GT;
9784 	      /* ... fall through to GT below.  */
9785 	    }
9786 	  else
9787 	    break;
9788 
9789 	case GT:
9790 	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
9791 	  if (const_op < 0)
9792 	    {
9793 	      const_op += 1;
9794 	      op1 = GEN_INT (const_op);
9795 	      code = GE;
9796 	    }
9797 
9798 	  /* If we are doing a > 0 comparison on a value known to have
9799 	     a zero sign bit, we can replace this with != 0.  */
9800 	  else if (const_op == 0
9801 		   && mode_width <= HOST_BITS_PER_WIDE_INT
9802 		   && (nonzero_bits (op0, mode)
9803 		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9804 	    code = NE;
9805 	  break;
9806 
9807 	case LTU:
9808 	  /* < C is equivalent to <= (C - 1).  */
9809 	  if (const_op > 0)
9810 	    {
9811 	      const_op -= 1;
9812 	      op1 = GEN_INT (const_op);
9813 	      code = LEU;
9814 	      /* ... fall through ...  */
9815 	    }
9816 
9817 	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
9818 	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9819 		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9820 	    {
9821 	      const_op = 0, op1 = const0_rtx;
9822 	      code = GE;
9823 	      break;
9824 	    }
9825 	  else
9826 	    break;
9827 
9828 	case LEU:
9829 	  /* unsigned <= 0 is equivalent to == 0 */
9830 	  if (const_op == 0)
9831 	    code = EQ;
9832 
9833 	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
9834 	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9835 		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9836 	    {
9837 	      const_op = 0, op1 = const0_rtx;
9838 	      code = GE;
9839 	    }
9840 	  break;
9841 
9842 	case GEU:
9843 	  /* >= C is equivalent to > (C - 1).  */
9844 	  if (const_op > 1)
9845 	    {
9846 	      const_op -= 1;
9847 	      op1 = GEN_INT (const_op);
9848 	      code = GTU;
9849 	      /* ... fall through ...  */
9850 	    }
9851 
9852 	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
9853 	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9854 		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9855 	    {
9856 	      const_op = 0, op1 = const0_rtx;
9857 	      code = LT;
9858 	      break;
9859 	    }
9860 	  else
9861 	    break;
9862 
9863 	case GTU:
9864 	  /* unsigned > 0 is equivalent to != 0 */
9865 	  if (const_op == 0)
9866 	    code = NE;
9867 
9868 	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
9869 	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9870 		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9871 	    {
9872 	      const_op = 0, op1 = const0_rtx;
9873 	      code = LT;
9874 	    }
9875 	  break;
9876 
9877 	default:
9878 	  break;
9879 	}
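      /* Worked examples (illustrative, assuming 32-bit SImode): the
	 canonicalizations above turn (lt X (const_int 5)) into
	 (le X (const_int 4)), (leu X (const_int 0)) into
	 (eq X (const_int 0)), and (ltu X (const_int 0x80000000)) into
	 (ge X (const_int 0)).  */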
9880 
9881       /* Compute some predicates to simplify code below.  */
9882 
9883       equality_comparison_p = (code == EQ || code == NE);
9884       sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9885       unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9886 			       || code == GEU);
9887 
9888       /* If this is a sign bit comparison and we can do arithmetic in
9889 	 MODE, say that we will only be needing the sign bit of OP0.  */
9890       if (sign_bit_comparison_p
9891 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9892 	op0 = force_to_mode (op0, mode,
9893 			     ((HOST_WIDE_INT) 1
9894 			      << (GET_MODE_BITSIZE (mode) - 1)),
9895 			     0);
9896 
9897       /* Now try cases based on the opcode of OP0.  If none of the cases
9898 	 does a "continue", we exit this loop immediately after the
9899 	 switch.  */
9900 
9901       switch (GET_CODE (op0))
9902 	{
9903 	case ZERO_EXTRACT:
9904 	  /* If we are extracting a single bit from a variable position in
9905 	     a constant that has only a single bit set and are comparing it
9906 	     with zero, we can convert this into an equality comparison
9907 	     between the position and the location of the single bit.  */
9908 	  /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
9909 	     have already reduced the shift count modulo the word size.  */
9910 	  if (!SHIFT_COUNT_TRUNCATED
9911 	      && GET_CODE (XEXP (op0, 0)) == CONST_INT
9912 	      && XEXP (op0, 1) == const1_rtx
9913 	      && equality_comparison_p && const_op == 0
9914 	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
9915 	    {
9916 	      if (BITS_BIG_ENDIAN)
9917 		{
9918 		  enum machine_mode new_mode
9919 		    = mode_for_extraction (EP_extzv, 1);
9920 		  if (new_mode == MAX_MACHINE_MODE)
9921 		    i = BITS_PER_WORD - 1 - i;
9922 		  else
9923 		    {
9924 		      mode = new_mode;
9925 		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
9926 		    }
9927 		}
9928 
9929 	      op0 = XEXP (op0, 2);
9930 	      op1 = GEN_INT (i);
9931 	      const_op = i;
9932 
9933 	      /* Result is nonzero iff shift count is equal to I.  */
9934 	      code = reverse_condition (code);
9935 	      continue;
9936 	    }
9937 
9938 	  /* ... fall through ...  */
9939 
9940 	case SIGN_EXTRACT:
9941 	  tem = expand_compound_operation (op0);
9942 	  if (tem != op0)
9943 	    {
9944 	      op0 = tem;
9945 	      continue;
9946 	    }
9947 	  break;
9948 
9949 	case NOT:
9950 	  /* If testing for equality, we can take the NOT of the constant.  */
9951 	  if (equality_comparison_p
9952 	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9953 	    {
9954 	      op0 = XEXP (op0, 0);
9955 	      op1 = tem;
9956 	      continue;
9957 	    }
9958 
9959 	  /* If just looking at the sign bit, reverse the sense of the
9960 	     comparison.  */
9961 	  if (sign_bit_comparison_p)
9962 	    {
9963 	      op0 = XEXP (op0, 0);
9964 	      code = (code == GE ? LT : GE);
9965 	      continue;
9966 	    }
9967 	  break;
9968 
9969 	case NEG:
9970 	  /* If testing for equality, we can take the NEG of the constant.  */
9971 	  if (equality_comparison_p
9972 	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9973 	    {
9974 	      op0 = XEXP (op0, 0);
9975 	      op1 = tem;
9976 	      continue;
9977 	    }
9978 
9979 	  /* The remaining cases only apply to comparisons with zero.  */
9980 	  if (const_op != 0)
9981 	    break;
9982 
9983 	  /* When X is ABS or is known positive,
9984 	     (neg X) is < 0 if and only if X != 0.  */
9985 
9986 	  if (sign_bit_comparison_p
9987 	      && (GET_CODE (XEXP (op0, 0)) == ABS
9988 		  || (mode_width <= HOST_BITS_PER_WIDE_INT
9989 		      && (nonzero_bits (XEXP (op0, 0), mode)
9990 			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9991 	    {
9992 	      op0 = XEXP (op0, 0);
9993 	      code = (code == LT ? NE : EQ);
9994 	      continue;
9995 	    }
9996 
9997 	  /* If we have NEG of something whose two high-order bits are the
9998 	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
9999 	  if (num_sign_bit_copies (op0, mode) >= 2)
10000 	    {
10001 	      op0 = XEXP (op0, 0);
10002 	      code = swap_condition (code);
10003 	      continue;
10004 	    }
10005 	  break;
10006 
10007 	case ROTATE:
10008 	  /* If we are testing equality and our count is a constant, we
10009 	     can perform the inverse operation on our RHS.  */
10010 	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10011 	      && (tem = simplify_binary_operation (ROTATERT, mode,
10012 						   op1, XEXP (op0, 1))) != 0)
10013 	    {
10014 	      op0 = XEXP (op0, 0);
10015 	      op1 = tem;
10016 	      continue;
10017 	    }
10018 
10019 	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10020 	     a particular bit.  Convert it to an AND of a constant of that
10021 	     bit.  This will be converted into a ZERO_EXTRACT.  */
10022 	  if (const_op == 0 && sign_bit_comparison_p
10023 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10024 	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10025 	    {
10026 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10027 					    ((HOST_WIDE_INT) 1
10028 					     << (mode_width - 1
10029 						 - INTVAL (XEXP (op0, 1)))));
10030 	      code = (code == LT ? NE : EQ);
10031 	      continue;
10032 	    }
10033 
10034 	  /* Fall through.  */
10035 
10036 	case ABS:
10037 	  /* ABS is ignorable inside an equality comparison with zero.  */
10038 	  if (const_op == 0 && equality_comparison_p)
10039 	    {
10040 	      op0 = XEXP (op0, 0);
10041 	      continue;
10042 	    }
10043 	  break;
10044 
10045 	case SIGN_EXTEND:
10046 	  /* Can simplify (compare (zero/sign_extend FOO) CONST) to
10047 	     (compare FOO CONST) if CONST fits in FOO's mode and we
10048 	     are either testing inequality or have an unsigned
10049 	     comparison with ZERO_EXTEND or a signed comparison with
10050 	     SIGN_EXTEND.  But don't do it if we don't have a compare
10051 	     insn of the given mode, since we'd have to revert it
10052 	     later on, and then we wouldn't know whether to sign- or
10053 	     zero-extend.  */
10054 	  mode = GET_MODE (XEXP (op0, 0));
10055 	  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10056 	      && ! unsigned_comparison_p
10057 	      && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10058 	      && ((unsigned HOST_WIDE_INT) const_op
10059 		  < (((unsigned HOST_WIDE_INT) 1
10060 		      << (GET_MODE_BITSIZE (mode) - 1))))
10061 	      && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
10062 	    {
10063 	      op0 = XEXP (op0, 0);
10064 	      continue;
10065 	    }
10066 	  break;
10067 
10068 	case SUBREG:
10069 	  /* Check for the case where we are comparing A - C1 with C2, that is
10070 
10071 	       (subreg:MODE (plus (A) (-C1))) op (C2)
10072 
10073 	     with C1 a constant, and try to lift the SUBREG, i.e. to do the
10074 	     comparison in the wider mode.  One of the following two conditions
10075 	     must be true in order for this to be valid:
10076 
10077 	       1. The mode extension results in the same bit pattern being added
10078 		  on both sides and the comparison is equality or unsigned.  As
10079 		  C2 has been truncated to fit in MODE, the pattern can only be
10080 		  all 0s or all 1s.
10081 
10082 	       2. The mode extension results in the sign bit being copied on
10083 		  each side.
10084 
10085 	     The difficulty here is that we have predicates for A but not for
10086 	     (A - C1) so we need to check that C1 is within proper bounds so
10087 	     as to perturb A as little as possible.  */
10088 
10089 	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10090 	      && subreg_lowpart_p (op0)
10091 	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
10092 	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10093 	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
10094 	    {
10095 	      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10096 	      rtx a = XEXP (SUBREG_REG (op0), 0);
10097 	      HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10098 
10099 	      if ((c1 > 0
10100 		   && (unsigned HOST_WIDE_INT) c1
10101 		       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10102 		   && (equality_comparison_p || unsigned_comparison_p)
10103 		   /* (A - C1) zero-extends if it is positive and sign-extends
10104 		      if it is negative, C2 both zero- and sign-extends.  */
10105 		   && ((0 == (nonzero_bits (a, inner_mode)
10106 			      & ~GET_MODE_MASK (mode))
10107 			&& const_op >= 0)
10108 		       /* (A - C1) sign-extends if it is positive and 1-extends
10109 			  if it is negative, C2 both sign- and 1-extends.  */
10110 		       || (num_sign_bit_copies (a, inner_mode)
10111 			   > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10112 					     - mode_width)
10113 			   && const_op < 0)))
10114 		  || ((unsigned HOST_WIDE_INT) c1
10115 		       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10116 		      /* (A - C1) always sign-extends, like C2.  */
10117 		      && num_sign_bit_copies (a, inner_mode)
10118 			 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10119 					   - (mode_width - 1))))
10120 		{
10121 		  op0 = SUBREG_REG (op0);
10122 		  continue;
10123 		}
10124 	    }
10125 
10126 	  /* If the inner mode is narrower and we are extracting the low part,
10127 	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10128 	  if (subreg_lowpart_p (op0)
10129 	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10130 	    /* Fall through */ ;
10131 	  else
10132 	    break;
10133 
10134 	  /* ... fall through ...  */
10135 
10136 	case ZERO_EXTEND:
10137 	  mode = GET_MODE (XEXP (op0, 0));
10138 	  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10139 	      && (unsigned_comparison_p || equality_comparison_p)
10140 	      && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10141 	      && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
10142 	      && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
10143 	    {
10144 	      op0 = XEXP (op0, 0);
10145 	      continue;
10146 	    }
10147 	  break;
10148 
10149 	case PLUS:
10150 	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10151 	     this for equality comparisons due to pathological cases involving
10152 	     overflows.  */
10153 	  if (equality_comparison_p
10154 	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10155 							op1, XEXP (op0, 1))))
10156 	    {
10157 	      op0 = XEXP (op0, 0);
10158 	      op1 = tem;
10159 	      continue;
10160 	    }
10161 
10162 	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10163 	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10164 	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10165 	    {
10166 	      op0 = XEXP (XEXP (op0, 0), 0);
10167 	      code = (code == LT ? EQ : NE);
10168 	      continue;
10169 	    }
10170 	  break;
10171 
10172 	case MINUS:
10173 	  /* We used to optimize signed comparisons against zero, but that
10174 	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10175 	     arrive here as equality comparisons, or (GEU, LTU) are
10176 	     optimized away.  No need to special-case them.  */
10177 
10178 	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10179 	     (eq B (minus A C)), whichever simplifies.  We can only do
10180 	     this for equality comparisons due to pathological cases involving
10181 	     overflows.  */
10182 	  if (equality_comparison_p
10183 	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10184 							XEXP (op0, 1), op1)))
10185 	    {
10186 	      op0 = XEXP (op0, 0);
10187 	      op1 = tem;
10188 	      continue;
10189 	    }
10190 
10191 	  if (equality_comparison_p
10192 	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10193 							XEXP (op0, 0), op1)))
10194 	    {
10195 	      op0 = XEXP (op0, 1);
10196 	      op1 = tem;
10197 	      continue;
10198 	    }
10199 
10200 	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10201 	     of bits in X minus 1, is one iff X > 0.  */
10202 	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10203 	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10204 	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10205 		 == mode_width - 1
10206 	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10207 	    {
10208 	      op0 = XEXP (op0, 1);
10209 	      code = (code == GE ? LE : GT);
10210 	      continue;
10211 	    }
10212 	  break;
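	  /* Worked example (illustrative, assuming 32-bit SImode): the sign
	     bit of (minus:SI (ashiftrt:SI X 31) X) is set exactly when
	     X > 0 (0 - X is negative for X > 0, while -1 - X is nonnegative
	     for X < 0), so a sign-bit test of it becomes
	     (gt X (const_int 0)) or its reverse.  */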
10213 
10214 	case XOR:
10215 	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10216 	     if C is zero or B is a constant.  */
10217 	  if (equality_comparison_p
10218 	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10219 							XEXP (op0, 1), op1)))
10220 	    {
10221 	      op0 = XEXP (op0, 0);
10222 	      op1 = tem;
10223 	      continue;
10224 	    }
10225 	  break;
10226 
10227 	case EQ:  case NE:
10228 	case UNEQ:  case LTGT:
10229 	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
10230 	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
10231 	case UNORDERED: case ORDERED:
10232 	  /* We can't do anything if OP0 is a condition code value, rather
10233 	     than an actual data value.  */
10234 	  if (const_op != 0
10235 	      || CC0_P (XEXP (op0, 0))
10236 	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10237 	    break;
10238 
10239 	  /* Get the two operands being compared.  */
10240 	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10241 	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10242 	  else
10243 	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10244 
10245 	  /* Check for the cases where we simply want the result of the
10246 	     earlier test or the opposite of that result.  */
10247 	  if (code == NE || code == EQ
10248 	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10249 		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10250 		  && (STORE_FLAG_VALUE
10251 		      & (((HOST_WIDE_INT) 1
10252 			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10253 		  && (code == LT || code == GE)))
10254 	    {
10255 	      enum rtx_code new_code;
10256 	      if (code == LT || code == NE)
10257 		new_code = GET_CODE (op0);
10258 	      else
10259 		new_code = reversed_comparison_code (op0, NULL);
10260 
10261 	      if (new_code != UNKNOWN)
10262 		{
10263 		  code = new_code;
10264 		  op0 = tem;
10265 		  op1 = tem1;
10266 		  continue;
10267 		}
10268 	    }
10269 	  break;
10270 
10271 	case IOR:
10272 	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10273 	     iff X <= 0.  */
10274 	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10275 	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10276 	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10277 	    {
10278 	      op0 = XEXP (op0, 1);
10279 	      code = (code == GE ? GT : LE);
10280 	      continue;
10281 	    }
10282 	  break;
10283 
10284 	case AND:
10285 	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
10286 	     will be converted to a ZERO_EXTRACT later.  */
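	  /* Both forms are nonzero exactly when bit X of Y is set, so under
	     an equality test against zero they are interchangeable; the
	     second maps directly onto a single-bit ZERO_EXTRACT.  */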
10287 	  if (const_op == 0 && equality_comparison_p
10288 	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10289 	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10290 	    {
10291 	      op0 = simplify_and_const_int
10292 		(NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
10293 						   XEXP (op0, 1),
10294 						   XEXP (XEXP (op0, 0), 1)),
10295 		 (HOST_WIDE_INT) 1);
10296 	      continue;
10297 	    }
10298 
10299 	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10300 	     zero and X is a comparison and C1 and C2 describe only bits set
10301 	     in STORE_FLAG_VALUE, we can compare with X.  */
10302 	  if (const_op == 0 && equality_comparison_p
10303 	      && mode_width <= HOST_BITS_PER_WIDE_INT
10304 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10305 	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10306 	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10307 	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10308 	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10309 	    {
10310 	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10311 		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
10312 	      if ((~STORE_FLAG_VALUE & mask) == 0
10313 		  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
10314 		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10315 			  && COMPARISON_P (tem))))
10316 		{
10317 		  op0 = XEXP (XEXP (op0, 0), 0);
10318 		  continue;
10319 		}
10320 	    }
10321 
10322 	  /* If we are doing an equality comparison of an AND of a bit equal
10323 	     to the sign bit, replace this with an LT or GE comparison of
10324 	     the underlying value.  */
10325 	  if (equality_comparison_p
10326 	      && const_op == 0
10327 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10328 	      && mode_width <= HOST_BITS_PER_WIDE_INT
10329 	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10330 		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10331 	    {
10332 	      op0 = XEXP (op0, 0);
10333 	      code = (code == EQ ? GE : LT);
10334 	      continue;
10335 	    }
10336 
10337 	  /* If this AND operation is really a ZERO_EXTEND from a narrower
10338 	     mode, the constant fits within that mode, and this is either an
10339 	     equality or unsigned comparison, try to do this comparison in
10340 	     the narrower mode.
10341 
10342 	     Note that in:
10343 
10344 	     (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
10345 	     -> (ne:DI (reg:SI 4) (const_int 0))
10346 
10347 	     unless TRULY_NOOP_TRUNCATION allows it or the register is
10348 	     known to hold a value of the required mode, the
10349 	     transformation is invalid.  */
10350 	  if ((equality_comparison_p || unsigned_comparison_p)
10351 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10352 	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10353 				   & GET_MODE_MASK (mode))
10354 				  + 1)) >= 0
10355 	      && const_op >> i == 0
10356 	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
10357 	      && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
10358 					 GET_MODE_BITSIZE (GET_MODE (op0)))
10359 		  || (REG_P (XEXP (op0, 0))
10360 		      && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
10361 	    {
10362 	      op0 = gen_lowpart (tmode, XEXP (op0, 0));
10363 	      continue;
10364 	    }
10365 
10366 	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
10367 	     fits in both M1 and M2 and the SUBREG is either paradoxical
10368 	     or represents the low part, permute the SUBREG and the AND
10369 	     and try again.  */
10370 	  if (GET_CODE (XEXP (op0, 0)) == SUBREG)
10371 	    {
10372 	      unsigned HOST_WIDE_INT c1;
10373 	      tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
10374 	      /* Require an integral mode, to avoid creating something like
10375 		 (AND:SF ...).  */
10376 	      if (SCALAR_INT_MODE_P (tmode)
10377 		  /* It is unsafe to commute the AND into the SUBREG if the
10378 		     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
10379 		     not defined.  As originally written the upper bits
10380 		     have a defined value due to the AND operation.
10381 		     However, if we commute the AND inside the SUBREG then
10382 		     they no longer have defined values and the meaning of
10383 		     the code has been changed.  */
10384 		  && (0
10385 #ifdef WORD_REGISTER_OPERATIONS
10386 		      || (mode_width > GET_MODE_BITSIZE (tmode)
10387 			  && mode_width <= BITS_PER_WORD)
10388 #endif
10389 		      || (mode_width <= GET_MODE_BITSIZE (tmode)
10390 			  && subreg_lowpart_p (XEXP (op0, 0))))
10391 		  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10392 		  && mode_width <= HOST_BITS_PER_WIDE_INT
10393 		  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
10394 		  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
10395 		  && (c1 & ~GET_MODE_MASK (tmode)) == 0
10396 		  && c1 != mask
10397 		  && c1 != GET_MODE_MASK (tmode))
10398 		{
10399 		  op0 = simplify_gen_binary (AND, tmode,
10400 					     SUBREG_REG (XEXP (op0, 0)),
10401 					     gen_int_mode (c1, tmode));
10402 		  op0 = gen_lowpart (mode, op0);
10403 		  continue;
10404 		}
10405 	    }
10406 
10407 	  /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
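	  /* (and (not X) 1) is zero exactly when (and X 1) is nonzero,
	     so only the comparison code needs to flip.  */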
10408 	  if (const_op == 0 && equality_comparison_p
10409 	      && XEXP (op0, 1) == const1_rtx
10410 	      && GET_CODE (XEXP (op0, 0)) == NOT)
10411 	    {
10412 	      op0 = simplify_and_const_int
10413 		(NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
10414 	      code = (code == NE ? EQ : NE);
10415 	      continue;
10416 	    }
10417 
10418 	  /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10419 	     (eq (and (lshiftrt X) 1) 0).
10420 	     Also handle the case where (not X) is expressed using xor.  */
10421 	  if (const_op == 0 && equality_comparison_p
10422 	      && XEXP (op0, 1) == const1_rtx
10423 	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
10424 	    {
10425 	      rtx shift_op = XEXP (XEXP (op0, 0), 0);
10426 	      rtx shift_count = XEXP (XEXP (op0, 0), 1);
10427 
10428 	      if (GET_CODE (shift_op) == NOT
10429 		  || (GET_CODE (shift_op) == XOR
10430 		      && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
10431 		      && GET_CODE (shift_count) == CONST_INT
10432 		      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10433 		      && (INTVAL (XEXP (shift_op, 1))
10434 			  == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
10435 		{
10436 		  op0 = simplify_and_const_int
10437 		    (NULL_RTX, mode,
10438 		     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
10439 		     (HOST_WIDE_INT) 1);
10440 		  code = (code == NE ? EQ : NE);
10441 		  continue;
10442 		}
10443 	    }
10444 	  break;
10445 
10446 	case ASHIFT:
10447 	  /* If we have (compare (ashift FOO N) (const_int C)) and
10448 	     the high order N bits of FOO (N+1 if an inequality comparison)
10449 	     are known to be zero, we can do this by comparing FOO with C
10450 	     shifted right N bits so long as the low-order N bits of C are
10451 	     zero.  */
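	  /* For example, (eq (ashift X 2) (const_int 12)) can become
	     (eq X (const_int 3)) when the two high-order bits of X are
	     known to be zero.  */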
10452 	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10453 	      && INTVAL (XEXP (op0, 1)) >= 0
10454 	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10455 		  < HOST_BITS_PER_WIDE_INT)
10456 	      && ((const_op
10457 		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10458 	      && mode_width <= HOST_BITS_PER_WIDE_INT
10459 	      && (nonzero_bits (XEXP (op0, 0), mode)
10460 		  & ~(mask >> (INTVAL (XEXP (op0, 1))
10461 			       + ! equality_comparison_p))) == 0)
10462 	    {
10463 	      /* We must perform a logical shift, not an arithmetic one,
10464 		 as we want the top N bits of C to be zero.  */
10465 	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10466 
10467 	      temp >>= INTVAL (XEXP (op0, 1));
10468 	      op1 = gen_int_mode (temp, mode);
10469 	      op0 = XEXP (op0, 0);
10470 	      continue;
10471 	    }
10472 
10473 	  /* If we are doing a sign bit comparison, it means we are testing
10474 	     a particular bit.  Convert it to the appropriate AND.  */
10475 	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10476 	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10477 	    {
10478 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10479 					    ((HOST_WIDE_INT) 1
10480 					     << (mode_width - 1
10481 						 - INTVAL (XEXP (op0, 1)))));
10482 	      code = (code == LT ? NE : EQ);
10483 	      continue;
10484 	    }
10485 
10486 	  /* If this is an equality comparison with zero and we are shifting
10487 	     the low bit to the sign bit, we can convert this to an AND of the
10488 	     low-order bit.  */
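	  /* (ashift X (mode_width - 1)) keeps only the low-order bit of X,
	     moved to the sign position, so the result is zero exactly when
	     that bit is zero.  */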
10489 	  if (const_op == 0 && equality_comparison_p
10490 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10491 	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10492 		 == mode_width - 1)
10493 	    {
10494 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10495 					    (HOST_WIDE_INT) 1);
10496 	      continue;
10497 	    }
10498 	  break;
10499 
10500 	case ASHIFTRT:
10501 	  /* If this is an equality comparison with zero, we can do this
10502 	     as a logical shift, which might be much simpler.  */
10503 	  if (equality_comparison_p && const_op == 0
10504 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10505 	    {
10506 	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10507 					  XEXP (op0, 0),
10508 					  INTVAL (XEXP (op0, 1)));
10509 	      continue;
10510 	    }
10511 
10512 	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10513 	     do the comparison in a narrower mode.  */
10514 	  if (! unsigned_comparison_p
10515 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10516 	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10517 	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10518 	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10519 					 MODE_INT, 1)) != BLKmode
10520 	      && (((unsigned HOST_WIDE_INT) const_op
10521 		   + (GET_MODE_MASK (tmode) >> 1) + 1)
10522 		  <= GET_MODE_MASK (tmode)))
10523 	    {
10524 	      op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
10525 	      continue;
10526 	    }
10527 
10528 	  /* Likewise if OP0 is a PLUS of a sign extension with a
10529 	     constant, which is usually represented with the PLUS
10530 	     between the shifts.  */
10531 	  if (! unsigned_comparison_p
10532 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10533 	      && GET_CODE (XEXP (op0, 0)) == PLUS
10534 	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10535 	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10536 	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10537 	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10538 					 MODE_INT, 1)) != BLKmode
10539 	      && (((unsigned HOST_WIDE_INT) const_op
10540 		   + (GET_MODE_MASK (tmode) >> 1) + 1)
10541 		  <= GET_MODE_MASK (tmode)))
10542 	    {
10543 	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10544 	      rtx add_const = XEXP (XEXP (op0, 0), 1);
10545 	      rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
10546 						   add_const, XEXP (op0, 1));
10547 
10548 	      op0 = simplify_gen_binary (PLUS, tmode,
10549 					 gen_lowpart (tmode, inner),
10550 					 new_const);
10551 	      continue;
10552 	    }
10553 
10554 	  /* ... fall through ...  */
10555 	case LSHIFTRT:
10556 	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10557 	     the low order N bits of FOO are known to be zero, we can do this
10558 	     by comparing FOO with C shifted left N bits so long as no
10559 	     overflow occurs.  */
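	  /* For example, (ltu (lshiftrt X 2) (const_int 3)) can become
	     (ltu X (const_int 12)) when the two low-order bits of X are
	     known to be zero.  */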
10560 	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10561 	      && INTVAL (XEXP (op0, 1)) >= 0
10562 	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10563 	      && mode_width <= HOST_BITS_PER_WIDE_INT
10564 	      && (nonzero_bits (XEXP (op0, 0), mode)
10565 		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10566 	      && (((unsigned HOST_WIDE_INT) const_op
10567 		   + (GET_CODE (op0) != LSHIFTRT
10568 		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
10569 			 + 1)
10570 		      : 0))
10571 		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
10572 	    {
10573 	      /* If the shift was logical, then we must make the condition
10574 		 unsigned.  */
10575 	      if (GET_CODE (op0) == LSHIFTRT)
10576 		code = unsigned_condition (code);
10577 
10578 	      const_op <<= INTVAL (XEXP (op0, 1));
10579 	      op1 = GEN_INT (const_op);
10580 	      op0 = XEXP (op0, 0);
10581 	      continue;
10582 	    }
10583 
10584 	  /* If we are using this shift to extract just the sign bit, we
10585 	     can replace this with an LT or GE comparison.  */
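	  /* The shifted value is nonzero precisely when X is negative, so
	     (ne ... 0) becomes X < 0 and (eq ... 0) becomes X >= 0.  */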
10586 	  if (const_op == 0
10587 	      && (equality_comparison_p || sign_bit_comparison_p)
10588 	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10589 	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10590 		 == mode_width - 1)
10591 	    {
10592 	      op0 = XEXP (op0, 0);
10593 	      code = (code == NE || code == GT ? LT : GE);
10594 	      continue;
10595 	    }
10596 	  break;
10597 
10598 	default:
10599 	  break;
10600 	}
10601 
10602       break;
10603     }
10604 
10605   /* Now make any compound operations involved in this comparison.  Then,
10606      check for an outermost SUBREG on OP0 that is not doing anything or is
10607      paradoxical.  The latter transformation must only be performed when
10608      it is known that the "extra" bits will be the same in op0 and op1 or
10609      that they don't matter.  There are three cases to consider:
10610 
10611      1. SUBREG_REG (op0) is a register.  In this case the bits are don't
10612      care bits and we can assume they have any convenient value.  So
10613      making the transformation is safe.
10614 
10615      2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
10616      In this case the upper bits of op0 are undefined.  We should not make
10617      the simplification in that case as we do not know the contents of
10618      those bits.
10619 
10620      3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
10621      UNKNOWN.  In that case we know those bits are zeros or ones.  We must
10622      also be sure that they are the same as the upper bits of op1.
10623 
10624      We can never remove a SUBREG for a non-equality comparison because
10625      the sign bit is in a different place in the underlying object.  */
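  /* For instance, under case 1 a paradoxical
     (eq (subreg:SI (reg:QI 77) 0) (const_int 0)) can be narrowed to a
     QImode comparison of (reg:QI 77) against zero, since the "extra"
     bits are don't-cares.  */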
10626 
10627   op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10628   op1 = make_compound_operation (op1, SET);
10629 
10630   if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10631       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10632       && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
10633       && (code == NE || code == EQ))
10634     {
10635       if (GET_MODE_SIZE (GET_MODE (op0))
10636 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
10637 	{
10638 	  /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
10639 	     implemented.  */
10640 	  if (REG_P (SUBREG_REG (op0)))
10641 	    {
10642 	      op0 = SUBREG_REG (op0);
10643 	      op1 = gen_lowpart (GET_MODE (op0), op1);
10644 	    }
10645 	}
10646       else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10647 		<= HOST_BITS_PER_WIDE_INT)
10648 	       && (nonzero_bits (SUBREG_REG (op0),
10649 				 GET_MODE (SUBREG_REG (op0)))
10650 		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10651 	{
10652 	  tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
10653 
10654 	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10655 	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10656 	    op0 = SUBREG_REG (op0), op1 = tem;
10657 	}
10658     }
10659 
10660   /* We now do the opposite procedure: Some machines don't have compare
10661      insns in all modes.  If OP0's mode is an integer mode smaller than a
10662      word and we can't do a compare in that mode, see if there is a larger
10663      mode for which we can do the compare.  There are a number of cases in
10664      which we can use the wider mode.  */
10665 
10666   mode = GET_MODE (op0);
10667   if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10668       && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10669       && ! have_insn_for (COMPARE, mode))
10670     for (tmode = GET_MODE_WIDER_MODE (mode);
10671 	 (tmode != VOIDmode
10672 	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10673 	 tmode = GET_MODE_WIDER_MODE (tmode))
10674       if (have_insn_for (COMPARE, tmode))
10675 	{
10676 	  int zero_extended;
10677 
10678 	  /* If the only nonzero bits in OP0 and OP1 are those in the
10679 	     narrower mode and this is an equality or unsigned comparison,
10680 	     we can use the wider mode.  Similarly for sign-extended
10681 	     values, in which case it is true for all comparisons.  */
10682 	  zero_extended = ((code == EQ || code == NE
10683 			    || code == GEU || code == GTU
10684 			    || code == LEU || code == LTU)
10685 			   && (nonzero_bits (op0, tmode)
10686 			       & ~GET_MODE_MASK (mode)) == 0
10687 			   && ((GET_CODE (op1) == CONST_INT
10688 				|| (nonzero_bits (op1, tmode)
10689 				    & ~GET_MODE_MASK (mode)) == 0)));
10690 
10691 	  if (zero_extended
10692 	      || ((num_sign_bit_copies (op0, tmode)
10693 		   > (unsigned int) (GET_MODE_BITSIZE (tmode)
10694 				     - GET_MODE_BITSIZE (mode)))
10695 		  && (num_sign_bit_copies (op1, tmode)
10696 		      > (unsigned int) (GET_MODE_BITSIZE (tmode)
10697 					- GET_MODE_BITSIZE (mode)))))
10698 	    {
10699 	      /* If OP0 is an AND and we don't have an AND in MODE either,
10700 		 make a new AND in the proper mode.  */
10701 	      if (GET_CODE (op0) == AND
10702 		  && !have_insn_for (AND, mode))
10703 		op0 = simplify_gen_binary (AND, tmode,
10704 					   gen_lowpart (tmode,
10705 							XEXP (op0, 0)),
10706 					   gen_lowpart (tmode,
10707 							XEXP (op0, 1)));
10708 
10709 	      op0 = gen_lowpart (tmode, op0);
10710 	      if (zero_extended && GET_CODE (op1) == CONST_INT)
10711 		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
10712 	      op1 = gen_lowpart (tmode, op1);
10713 	      break;
10714 	    }
10715 
10716 	  /* If this is a test for negative, we can make an explicit
10717 	     test of the sign bit.  */
10718 
10719 	  if (op1 == const0_rtx && (code == LT || code == GE)
10720 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10721 	    {
10722 	      op0 = simplify_gen_binary (AND, tmode,
10723 					 gen_lowpart (tmode, op0),
10724 					 GEN_INT ((HOST_WIDE_INT) 1
10725 						  << (GET_MODE_BITSIZE (mode)
10726 						      - 1)));
10727 	      code = (code == LT) ? NE : EQ;
10728 	      break;
10729 	    }
10730 	}
10731 
10732 #ifdef CANONICALIZE_COMPARISON
10733   /* If this machine only supports a subset of valid comparisons, see if we
10734      can convert an unsupported one into a supported one.  */
10735   CANONICALIZE_COMPARISON (code, op0, op1);
10736 #endif
10737 
10738   *pop0 = op0;
10739   *pop1 = op1;
10740 
10741   return code;
10742 }
10743 
10744 /* Utility function for record_value_for_reg.  Count number of
10745    rtxs in X.  */
10746 static int
10747 count_rtxs (rtx x)
10748 {
10749   enum rtx_code code = GET_CODE (x);
10750   const char *fmt;
10751   int i, ret = 1;
10752 
10753   if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
10754       || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
10755     {
10756       rtx x0 = XEXP (x, 0);
10757       rtx x1 = XEXP (x, 1);
10758 
10759       if (x0 == x1)
10760 	return 1 + 2 * count_rtxs (x0);
10761 
10762       if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
10763 	   || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
10764 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
10765 	return 2 + 2 * count_rtxs (x0)
10766 	       + count_rtxs (x0 == XEXP (x1, 0)
10767 			     ? XEXP (x1, 1) : XEXP (x1, 0));
10768 
10769       if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
10770 	   || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
10771 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
10772 	return 2 + 2 * count_rtxs (x1)
10773 	       + count_rtxs (x1 == XEXP (x0, 0)
10774 			     ? XEXP (x0, 1) : XEXP (x0, 0));
10775     }
10776 
10777   fmt = GET_RTX_FORMAT (code);
10778   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10779     if (fmt[i] == 'e')
10780       ret += count_rtxs (XEXP (x, i));
10781 
10782   return ret;
10783 }
10784 
10785 /* Utility function for the following routine.  Called when X is part of a
10786    value being stored into last_set_value.  Sets last_set_table_tick
10787    for each register mentioned.  Similar to mention_regs in cse.c.  */
10788 
10789 static void
10790 update_table_tick (rtx x)
10791 {
10792   enum rtx_code code = GET_CODE (x);
10793   const char *fmt = GET_RTX_FORMAT (code);
10794   int i;
10795 
10796   if (code == REG)
10797     {
10798       unsigned int regno = REGNO (x);
10799       unsigned int endregno
10800 	= regno + (regno < FIRST_PSEUDO_REGISTER
10801 		   ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
10802       unsigned int r;
10803 
10804       for (r = regno; r < endregno; r++)
10805 	reg_stat[r].last_set_table_tick = label_tick;
10806 
10807       return;
10808     }
10809 
10810   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10811     /* Note that we can't have an "E" in values stored; see
10812        get_last_value_validate.  */
10813     if (fmt[i] == 'e')
10814       {
10815 	/* Check for identical subexpressions.  If x contains
10816 	   identical subexpressions we only have to traverse one of
10817 	   them.  */
10818 	if (i == 0 && ARITHMETIC_P (x))
10819 	  {
10820 	    /* Note that at this point x1 has already been
10821 	       processed.  */
10822 	    rtx x0 = XEXP (x, 0);
10823 	    rtx x1 = XEXP (x, 1);
10824 
10825 	    /* If x0 and x1 are identical then there is no need to
10826 	       process x0.  */
10827 	    if (x0 == x1)
10828 	      break;
10829 
10830 	    /* If x0 is identical to a subexpression of x1 then while
10831 	       processing x1, x0 has already been processed.  Thus we
10832 	       are done with x.  */
10833 	    if (ARITHMETIC_P (x1)
10834 		&& (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
10835 	      break;
10836 
10837 	    /* If x1 is identical to a subexpression of x0 then we
10838 	       still have to process the rest of x0.  */
10839 	    if (ARITHMETIC_P (x0)
10840 		&& (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
10841 	      {
10842 		update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
10843 		break;
10844 	      }
10845 	  }
10846 
10847 	update_table_tick (XEXP (x, i));
10848       }
10849 }
10850 
10851 /* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
10852    are saying that the register is clobbered and we no longer know its
10853    value.  If INSN is zero, don't update reg_stat[].last_set; this is
10854    only permitted with VALUE also zero and is used to invalidate the
10855    register.  */
10856 
10857 static void
10858 record_value_for_reg (rtx reg, rtx insn, rtx value)
10859 {
10860   unsigned int regno = REGNO (reg);
10861   unsigned int endregno
10862     = regno + (regno < FIRST_PSEUDO_REGISTER
10863 	       ? hard_regno_nregs[regno][GET_MODE (reg)] : 1);
10864   unsigned int i;
10865 
10866   /* If VALUE contains REG and we have a previous value for REG, substitute
10867      the previous value.  */
10868   if (value && insn && reg_overlap_mentioned_p (reg, value))
10869     {
10870       rtx tem;
10871 
10872       /* Set things up so get_last_value is allowed to see anything set up to
10873 	 our insn.  */
10874       subst_low_cuid = INSN_CUID (insn);
10875       tem = get_last_value (reg);
10876 
10877       /* If TEM is simply a binary operation with two CLOBBERs as operands,
10878 	 it isn't going to be useful and will take a lot of time to process,
10879 	 so just use the CLOBBER.  */
10880 
10881       if (tem)
10882 	{
10883 	  if (ARITHMETIC_P (tem)
10884 	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
10885 	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
10886 	    tem = XEXP (tem, 0);
10887 	  else if (count_occurrences (value, reg, 1) >= 2)
10888 	    {
10889 	      /* If there are two or more occurrences of REG in VALUE,
10890 		 prevent the value from growing too much.  */
10891 	      if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
10892 		tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
10893 	    }
10894 
10895 	  value = replace_rtx (copy_rtx (value), reg, tem);
10896 	}
10897     }
10898 
10899   /* For each register modified, show we don't know its value, that
10900      we don't know about its bitwise content, that its value has been
10901      updated, and that we don't know the location of the death of the
10902      register.  */
10903   for (i = regno; i < endregno; i++)
10904     {
10905       if (insn)
10906 	reg_stat[i].last_set = insn;
10907 
10908       reg_stat[i].last_set_value = 0;
10909       reg_stat[i].last_set_mode = 0;
10910       reg_stat[i].last_set_nonzero_bits = 0;
10911       reg_stat[i].last_set_sign_bit_copies = 0;
10912       reg_stat[i].last_death = 0;
10913       reg_stat[i].truncated_to_mode = 0;
10914     }
10915 
10916   /* Mark registers that are being referenced in this value.  */
10917   if (value)
10918     update_table_tick (value);
10919 
10920   /* Now update the status of each register being set.
10921      If someone is using this register in this block, set this register
10922      to invalid since we will get confused between the two lives in this
10923      basic block.  This makes using this register always invalid.  In cse, we
10924      scan the table to invalidate all entries using this register, but this
10925      is too much work for us.  */
10926 
10927   for (i = regno; i < endregno; i++)
10928     {
10929       reg_stat[i].last_set_label = label_tick;
10930       if (!insn || (value && reg_stat[i].last_set_table_tick == label_tick))
10931 	reg_stat[i].last_set_invalid = 1;
10932       else
10933 	reg_stat[i].last_set_invalid = 0;
10934     }
10935 
10936   /* The value being assigned might refer to X (like in "x++;").  In that
10937      case, we must replace it with (clobber (const_int 0)) to prevent
10938      infinite loops.  */
10939   if (value && ! get_last_value_validate (&value, insn,
10940 					  reg_stat[regno].last_set_label, 0))
10941     {
10942       value = copy_rtx (value);
10943       if (! get_last_value_validate (&value, insn,
10944 				     reg_stat[regno].last_set_label, 1))
10945 	value = 0;
10946     }
10947 
10948   /* For the main register being modified, update the value, the mode, the
10949      nonzero bits, and the number of sign bit copies.  */
10950 
10951   reg_stat[regno].last_set_value = value;
10952 
10953   if (value)
10954     {
10955       enum machine_mode mode = GET_MODE (reg);
10956       subst_low_cuid = INSN_CUID (insn);
10957       reg_stat[regno].last_set_mode = mode;
10958       if (GET_MODE_CLASS (mode) == MODE_INT
10959 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10960 	mode = nonzero_bits_mode;
10961       reg_stat[regno].last_set_nonzero_bits = nonzero_bits (value, mode);
10962       reg_stat[regno].last_set_sign_bit_copies
10963 	= num_sign_bit_copies (value, GET_MODE (reg));
10964     }
10965 }
10966 
10967 /* Called via note_stores from record_dead_and_set_regs to handle one
10968    SET or CLOBBER in an insn.  DATA is the instruction in which the
10969    set is occurring.  */
10970 
10971 static void
10972 record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
10973 {
10974   rtx record_dead_insn = (rtx) data;
10975 
10976   if (GET_CODE (dest) == SUBREG)
10977     dest = SUBREG_REG (dest);
10978 
10979   if (!record_dead_insn)
10980     {
10981       if (REG_P (dest))
10982 	record_value_for_reg (dest, NULL_RTX, NULL_RTX);
10983       return;
10984     }
10985 
10986   if (REG_P (dest))
10987     {
10988       /* If we are setting the whole register, we know its value.  Otherwise
10989 	 show that we don't know the value.  We can handle SUBREG in
10990 	 some cases.  */
10991       if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
10992 	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
10993       else if (GET_CODE (setter) == SET
10994 	       && GET_CODE (SET_DEST (setter)) == SUBREG
10995 	       && SUBREG_REG (SET_DEST (setter)) == dest
10996 	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
10997 	       && subreg_lowpart_p (SET_DEST (setter)))
10998 	record_value_for_reg (dest, record_dead_insn,
10999 			      gen_lowpart (GET_MODE (dest),
11000 						       SET_SRC (setter)));
11001       else
11002 	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11003     }
11004   else if (MEM_P (dest)
11005 	   /* Ignore pushes, they clobber nothing.  */
11006 	   && ! push_operand (dest, GET_MODE (dest)))
11007     mem_last_set = INSN_CUID (record_dead_insn);
11008 }
11009 
11010 /* Update the records of when each REG was most recently set or killed
11011    for the things done by INSN.  This is the last thing done in processing
11012    INSN in the combiner loop.
11013 
11014    We update reg_stat[], in particular fields last_set, last_set_value,
11015    last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
11016    last_death, and also the similar information mem_last_set (which insn
11017    most recently modified memory) and last_call_cuid (which insn was the
11018    most recent subroutine call).  */
11019 
11020 static void
11021 record_dead_and_set_regs (rtx insn)
11022 {
11023   rtx link;
11024   unsigned int i;
11025 
11026   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11027     {
11028       if (REG_NOTE_KIND (link) == REG_DEAD
11029 	  && REG_P (XEXP (link, 0)))
11030 	{
11031 	  unsigned int regno = REGNO (XEXP (link, 0));
11032 	  unsigned int endregno
11033 	    = regno + (regno < FIRST_PSEUDO_REGISTER
11034 		       ? hard_regno_nregs[regno][GET_MODE (XEXP (link, 0))]
11035 		       : 1);
11036 
11037 	  for (i = regno; i < endregno; i++)
11038 	    reg_stat[i].last_death = insn;
11039 	}
11040       else if (REG_NOTE_KIND (link) == REG_INC)
11041 	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11042     }
11043 
11044   if (CALL_P (insn))
11045     {
11046       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11047 	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11048 	  {
11049 	    reg_stat[i].last_set_value = 0;
11050 	    reg_stat[i].last_set_mode = 0;
11051 	    reg_stat[i].last_set_nonzero_bits = 0;
11052 	    reg_stat[i].last_set_sign_bit_copies = 0;
11053 	    reg_stat[i].last_death = 0;
11054 	    reg_stat[i].truncated_to_mode = 0;
11055 	  }
11056 
11057       last_call_cuid = mem_last_set = INSN_CUID (insn);
11058 
11059       /* We can't combine into a call pattern.  Remember, though, that
11060 	 the return value register is set at this CUID.  We could
11061 	 still replace a register with the return value from the
11062 	 wrong subroutine call!  */
11063       note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
11064     }
11065   else
11066     note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11067 }
11068 
11069 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11070    register present in the SUBREG, so for each such SUBREG go back and
11071    adjust nonzero and sign bit information of the registers that are
11072    known to have some zero/sign bits set.
11073 
11074    This is needed because when combine blows the SUBREGs away, the
11075    information on zero/sign bits is lost and further combines can be
11076    missed because of that.  */
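/* For example, when (subreg:HI (reg:SI 70) 0) is marked as a promoted
   unsigned value, the nonzero bits recorded for (reg:SI 70) can be
   narrowed to the HImode mask.  */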
11077 
11078 static void
11079 record_promoted_value (rtx insn, rtx subreg)
11080 {
11081   rtx links, set;
11082   unsigned int regno = REGNO (SUBREG_REG (subreg));
11083   enum machine_mode mode = GET_MODE (subreg);
11084 
11085   if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11086     return;
11087 
11088   for (links = LOG_LINKS (insn); links;)
11089     {
11090       insn = XEXP (links, 0);
11091       set = single_set (insn);
11092 
11093       if (! set || !REG_P (SET_DEST (set))
11094 	  || REGNO (SET_DEST (set)) != regno
11095 	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11096 	{
11097 	  links = XEXP (links, 1);
11098 	  continue;
11099 	}
11100 
11101       if (reg_stat[regno].last_set == insn)
11102 	{
11103 	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11104 	    reg_stat[regno].last_set_nonzero_bits &= GET_MODE_MASK (mode);
11105 	}
11106 
11107       if (REG_P (SET_SRC (set)))
11108 	{
11109 	  regno = REGNO (SET_SRC (set));
11110 	  links = LOG_LINKS (insn);
11111 	}
11112       else
11113 	break;
11114     }
11115 }
11116 
11117 /* Check if X, a register, is known to contain a value already
11118    truncated to MODE.  In this case we can use a subreg to refer to
11119    the truncated value even though in the generic case we would need
11120    an explicit truncation.  */
11121 
11122 static bool
11123 reg_truncated_to_mode (enum machine_mode mode, rtx x)
11124 {
11125   enum machine_mode truncated = reg_stat[REGNO (x)].truncated_to_mode;
11126 
11127   if (truncated == 0 || reg_stat[REGNO (x)].truncation_label != label_tick)
11128     return false;
11129   if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
11130     return true;
11131   if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
11132 			     GET_MODE_BITSIZE (truncated)))
11133     return true;
11134   return false;
11135 }
11136 
11137 /* X is a REG or a SUBREG.  If X is some sort of a truncation, record
11138    it.  For non-TRULY_NOOP_TRUNCATION targets we might be able to turn
11139    a truncate into a subreg using this information.  */
11140 
11141 static void
11142 record_truncated_value (rtx x)
11143 {
11144   enum machine_mode truncated_mode;
11145 
11146   if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
11147     {
11148       enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
11149       truncated_mode = GET_MODE (x);
11150 
11151       if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
11152 	return;
11153 
11154       if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode),
11155 				 GET_MODE_BITSIZE (original_mode)))
11156 	return;
11157 
11158       x = SUBREG_REG (x);
11159     }
11160   /* ??? For hard-regs we now record everything.  We might be able to
11161      optimize this using last_set_mode.  */
11162   else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
11163     truncated_mode = GET_MODE (x);
11164   else
11165     return;
11166 
11167   if (reg_stat[REGNO (x)].truncated_to_mode == 0
11168       || reg_stat[REGNO (x)].truncation_label < label_tick
11169       || (GET_MODE_SIZE (truncated_mode)
11170 	  < GET_MODE_SIZE (reg_stat[REGNO (x)].truncated_to_mode)))
11171     {
11172       reg_stat[REGNO (x)].truncated_to_mode = truncated_mode;
11173       reg_stat[REGNO (x)].truncation_label = label_tick;
11174     }
11175 }
11176 
11177 /* Scan X for promoted SUBREGs and truncated REGs.  For each one
11178    found, note what it implies to the registers used in it.  */
11179 
11180 static void
11181 check_conversions (rtx insn, rtx x)
11182 {
11183   if (GET_CODE (x) == SUBREG || REG_P (x))
11184     {
11185       if (GET_CODE (x) == SUBREG
11186 	  && SUBREG_PROMOTED_VAR_P (x)
11187 	  && REG_P (SUBREG_REG (x)))
11188 	record_promoted_value (insn, x);
11189 
11190       record_truncated_value (x);
11191     }
11192   else
11193     {
11194       const char *format = GET_RTX_FORMAT (GET_CODE (x));
11195       int i, j;
11196 
11197       for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11198 	switch (format[i])
11199 	  {
11200 	  case 'e':
11201 	    check_conversions (insn, XEXP (x, i));
11202 	    break;
11203 	  case 'V':
11204 	  case 'E':
11205 	    if (XVEC (x, i) != 0)
11206 	      for (j = 0; j < XVECLEN (x, i); j++)
11207 		check_conversions (insn, XVECEXP (x, i, j));
11208 	    break;
11209 	  }
11210     }
11211 }
11212 
11213 /* Utility routine for the following function.  Verify that all the registers
11214    mentioned in *LOC are valid when *LOC was part of a value set when
11215    label_tick == TICK.  Return 0 if some are not.
11216 
11217    If REPLACE is nonzero, replace the invalid reference with
11218    (clobber (const_int 0)) and return 1.  This replacement is useful because
11219    we often can get useful information about the form of a value (e.g., if
11220    it was produced by a shift that always produces -1 or 0) even though
11221    we don't know exactly what registers it was produced from.  */
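/* For instance, a recorded value of
   (ashiftrt:SI (reg:SI 65) (const_int 31)) whose register is no longer
   valid can become (ashiftrt:SI (clobber (const_int 0)) (const_int 31)),
   which still shows that the value is either -1 or 0.  */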
11222 
11223 static int
11224 get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
11225 {
11226   rtx x = *loc;
11227   const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11228   int len = GET_RTX_LENGTH (GET_CODE (x));
11229   int i;
11230 
11231   if (REG_P (x))
11232     {
11233       unsigned int regno = REGNO (x);
11234       unsigned int endregno
11235 	= regno + (regno < FIRST_PSEUDO_REGISTER
11236 		   ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11237       unsigned int j;
11238 
11239       for (j = regno; j < endregno; j++)
11240 	if (reg_stat[j].last_set_invalid
11241 	    /* If this is a pseudo-register that was only set once and not
11242 	       live at the beginning of the function, it is always valid.  */
11243 	    || (! (regno >= FIRST_PSEUDO_REGISTER
11244 		   && REG_N_SETS (regno) == 1
11245 		   && (! REGNO_REG_SET_P
11246 		       (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
11247 			regno)))
11248 		&& reg_stat[j].last_set_label > tick))
11249 	  {
11250 	    if (replace)
11251 	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11252 	    return replace;
11253 	  }
11254 
11255       return 1;
11256     }
11257   /* If this is a memory reference, make sure that there were
11258      no stores after it that might have clobbered the value.  We don't
11259      have alias info, so we assume any store invalidates it.  */
11260   else if (MEM_P (x) && !MEM_READONLY_P (x)
11261 	   && INSN_CUID (insn) <= mem_last_set)
11262     {
11263       if (replace)
11264 	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11265       return replace;
11266     }
11267 
11268   for (i = 0; i < len; i++)
11269     {
11270       if (fmt[i] == 'e')
11271 	{
11272 	  /* Check for identical subexpressions.  If x contains
11273 	     identical subexpression we only have to traverse one of
11274 	     identical subexpressions we only have to traverse one of
11275 	  if (i == 1 && ARITHMETIC_P (x))
11276 	    {
11277 	      /* Note that at this point x0 has already been checked
11278 		 and found valid.  */
11279 	      rtx x0 = XEXP (x, 0);
11280 	      rtx x1 = XEXP (x, 1);
11281 
11282 	      /* If x0 and x1 are identical then x is also valid.  */
11283 	      if (x0 == x1)
11284 		return 1;
11285 
11286 	      /* If x1 is identical to a subexpression of x0 then
11287 		 while checking x0, x1 has already been checked.  Thus
11288 		 it is valid, and so is x.  */
11289 	      if (ARITHMETIC_P (x0)
11290 		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11291 		return 1;
11292 
11293 	      /* If x0 is identical to a subexpression of x1 then x is
11294 		 valid iff the rest of x1 is valid.  */
11295 	      if (ARITHMETIC_P (x1)
11296 		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11297 		return
11298 		  get_last_value_validate (&XEXP (x1,
11299 						  x0 == XEXP (x1, 0) ? 1 : 0),
11300 					   insn, tick, replace);
11301 	    }
11302 
11303 	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
11304 				       replace) == 0)
11305 	    return 0;
11306 	}
11307       /* Don't bother with these.  They shouldn't occur anyway.  */
11308       else if (fmt[i] == 'E')
11309 	return 0;
11310     }
11311 
11312   /* If we haven't found a reason for it to be invalid, it is valid.  */
11313   return 1;
11314 }
11315 
11316 /* Get the last value assigned to X, if known.  Some registers
11317    in the value may be replaced with (clobber (const_int 0)) if their value
11318    is no longer known reliably.  */
11319 
11320 static rtx
11321 get_last_value (rtx x)
11322 {
11323   unsigned int regno;
11324   rtx value;
11325 
11326   /* If this is a non-paradoxical SUBREG, get the value of its operand and
11327      then convert it to the desired mode.  If this is a paradoxical SUBREG,
11328      we cannot predict what values the "extra" bits might have.  */
11329   if (GET_CODE (x) == SUBREG
11330       && subreg_lowpart_p (x)
11331       && (GET_MODE_SIZE (GET_MODE (x))
11332 	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11333       && (value = get_last_value (SUBREG_REG (x))) != 0)
11334     return gen_lowpart (GET_MODE (x), value);
11335 
11336   if (!REG_P (x))
11337     return 0;
11338 
11339   regno = REGNO (x);
11340   value = reg_stat[regno].last_set_value;
11341 
11342   /* If we don't have a value, or if it isn't for this basic block and
11343      it's either a hard register, set more than once, or live
11344      at the beginning of the function, return 0.
11345 
11346      Because if it's not live at the beginning of the function then the reg
11347      is always set before being used (is never used without being set).
11348      And, if it's set only once, and it's always set before use, then all
11349      uses must have the same last value, even if it's not from this basic
11350      block.  */
11351 
11352   if (value == 0
11353       || (reg_stat[regno].last_set_label != label_tick
11354 	  && (regno < FIRST_PSEUDO_REGISTER
11355 	      || REG_N_SETS (regno) != 1
11356 	      || (REGNO_REG_SET_P
11357 		  (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
11358 		   regno)))))
11359     return 0;
11360 
11361   /* If the value was set in a later insn than the ones we are processing,
11362      we can't use it even if the register was only set once.  */
11363   if (INSN_CUID (reg_stat[regno].last_set) >= subst_low_cuid)
11364     return 0;
11365 
11366   /* If the value has all its registers valid, return it.  */
11367   if (get_last_value_validate (&value, reg_stat[regno].last_set,
11368 			       reg_stat[regno].last_set_label, 0))
11369     return value;
11370 
11371   /* Otherwise, make a copy and replace any invalid register with
11372      (clobber (const_int 0)).  If that fails for some reason, return 0.  */
11373 
11374   value = copy_rtx (value);
11375   if (get_last_value_validate (&value, reg_stat[regno].last_set,
11376 			       reg_stat[regno].last_set_label, 1))
11377     return value;
11378 
11379   return 0;
11380 }
11381 
11382 /* Return nonzero if expression X refers to a REG or to memory
11383    that is set in an instruction more recent than FROM_CUID.  */
11384 
11385 static int
11386 use_crosses_set_p (rtx x, int from_cuid)
11387 {
11388   const char *fmt;
11389   int i;
11390   enum rtx_code code = GET_CODE (x);
11391 
11392   if (code == REG)
11393     {
11394       unsigned int regno = REGNO (x);
11395       unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11396 				 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11397 
11398 #ifdef PUSH_ROUNDING
11399       /* Don't allow uses of the stack pointer to be moved,
11400 	 because we don't know whether the move crosses a push insn.  */
11401       if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11402 	return 1;
11403 #endif
11404       for (; regno < endreg; regno++)
11405 	if (reg_stat[regno].last_set
11406 	    && INSN_CUID (reg_stat[regno].last_set) > from_cuid)
11407 	  return 1;
11408       return 0;
11409     }
11410 
11411   if (code == MEM && mem_last_set > from_cuid)
11412     return 1;
11413 
11414   fmt = GET_RTX_FORMAT (code);
11415 
11416   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11417     {
11418       if (fmt[i] == 'E')
11419 	{
11420 	  int j;
11421 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11422 	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11423 	      return 1;
11424 	}
11425       else if (fmt[i] == 'e'
11426 	       && use_crosses_set_p (XEXP (x, i), from_cuid))
11427 	return 1;
11428     }
11429   return 0;
11430 }
11431 
11432 /* Define three variables used for communication between the following
11433    routines.  */
11434 
11435 static unsigned int reg_dead_regno, reg_dead_endregno;
11436 static int reg_dead_flag;
11437 
11438 /* Function called via note_stores from reg_dead_at_p.
11439 
11440    If DEST is within [reg_dead_regno, reg_dead_endregno), set
11441    reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
11442 
11443 static void
11444 reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
11445 {
11446   unsigned int regno, endregno;
11447 
11448   if (!REG_P (dest))
11449     return;
11450 
11451   regno = REGNO (dest);
11452   endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11453 		      ? hard_regno_nregs[regno][GET_MODE (dest)] : 1);
11454 
11455   if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11456     reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11457 }
11458 
11459 /* Return nonzero if REG is known to be dead at INSN.
11460 
11461    We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
11462    referencing REG, it is dead.  If we hit a SET referencing REG, it is
11463    live.  Otherwise, see if it is live or dead at the start of the basic
11464    block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
11465    must be assumed to be always live.  */
11466 
11467 static int
11468 reg_dead_at_p (rtx reg, rtx insn)
11469 {
11470   basic_block block;
11471   unsigned int i;
11472 
11473   /* Set variables for reg_dead_at_p_1.  */
11474   reg_dead_regno = REGNO (reg);
11475   reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11476 					? hard_regno_nregs[reg_dead_regno]
11477 							  [GET_MODE (reg)]
11478 					: 1);
11479 
11480   reg_dead_flag = 0;
11481 
11482   /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
11483      we allow the machine description to decide whether use-and-clobber
11484      patterns are OK.  */
11485   if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11486     {
11487       for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11488 	if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
11489 	  return 0;
11490     }
11491 
11492   /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11493      beginning of function.  */
11494   for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
11495        insn = prev_nonnote_insn (insn))
11496     {
11497       note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11498       if (reg_dead_flag)
11499 	return reg_dead_flag == 1 ? 1 : 0;
11500 
11501       if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11502 	return 1;
11503     }
11504 
11505   /* Get the basic block that we were in.  */
11506   if (insn == 0)
11507     block = ENTRY_BLOCK_PTR->next_bb;
11508   else
11509     {
11510       FOR_EACH_BB (block)
11511 	if (insn == BB_HEAD (block))
11512 	  break;
11513 
11514       if (block == EXIT_BLOCK_PTR)
11515 	return 0;
11516     }
11517 
11518   for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11519     if (REGNO_REG_SET_P (block->il.rtl->global_live_at_start, i))
11520       return 0;
11521 
11522   return 1;
11523 }
11524 
11525 /* Note hard registers in X that are used.  This code is similar to
11526    that in flow.c, but much simpler since we don't care about pseudos.  */
11527 
11528 static void
11529 mark_used_regs_combine (rtx x)
11530 {
11531   RTX_CODE code = GET_CODE (x);
11532   unsigned int regno;
11533   int i;
11534 
11535   switch (code)
11536     {
11537     case LABEL_REF:
11538     case SYMBOL_REF:
11539     case CONST_INT:
11540     case CONST:
11541     case CONST_DOUBLE:
11542     case CONST_VECTOR:
11543     case PC:
11544     case ADDR_VEC:
11545     case ADDR_DIFF_VEC:
11546     case ASM_INPUT:
11547 #ifdef HAVE_cc0
11548     /* CC0 must die in the insn after it is set, so we don't need to take
11549        special note of it here.  */
11550     case CC0:
11551 #endif
11552       return;
11553 
11554     case CLOBBER:
11555       /* If we are clobbering a MEM, mark any hard registers inside the
11556 	 address as used.  */
11557       if (MEM_P (XEXP (x, 0)))
11558 	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11559       return;
11560 
11561     case REG:
11562       regno = REGNO (x);
11563       /* A hard reg in a wide mode may really be multiple registers.
11564 	 If so, mark all of them just like the first.  */
11565       if (regno < FIRST_PSEUDO_REGISTER)
11566 	{
11567 	  unsigned int endregno, r;
11568 
11569 	  /* None of this applies to the stack, frame or arg pointers.  */
11570 	  if (regno == STACK_POINTER_REGNUM
11571 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11572 	      || regno == HARD_FRAME_POINTER_REGNUM
11573 #endif
11574 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11575 	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11576 #endif
11577 	      || regno == FRAME_POINTER_REGNUM)
11578 	    return;
11579 
11580 	  endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
11581 	  for (r = regno; r < endregno; r++)
11582 	    SET_HARD_REG_BIT (newpat_used_regs, r);
11583 	}
11584       return;
11585 
11586     case SET:
11587       {
11588 	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11589 	   the address.  */
11590 	rtx testreg = SET_DEST (x);
11591 
11592 	while (GET_CODE (testreg) == SUBREG
11593 	       || GET_CODE (testreg) == ZERO_EXTRACT
11594 	       || GET_CODE (testreg) == STRICT_LOW_PART)
11595 	  testreg = XEXP (testreg, 0);
11596 
11597 	if (MEM_P (testreg))
11598 	  mark_used_regs_combine (XEXP (testreg, 0));
11599 
11600 	mark_used_regs_combine (SET_SRC (x));
11601       }
11602       return;
11603 
11604     default:
11605       break;
11606     }
11607 
11608   /* Recursively scan the operands of this expression.  */
11609 
11610   {
11611     const char *fmt = GET_RTX_FORMAT (code);
11612 
11613     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11614       {
11615 	if (fmt[i] == 'e')
11616 	  mark_used_regs_combine (XEXP (x, i));
11617 	else if (fmt[i] == 'E')
11618 	  {
11619 	    int j;
11620 
11621 	    for (j = 0; j < XVECLEN (x, i); j++)
11622 	      mark_used_regs_combine (XVECEXP (x, i, j));
11623 	  }
11624       }
11625   }
11626 }
11627 
11628 /* Remove register number REGNO from the dead registers list of INSN.
11629 
11630    Return the note used to record the death, if there was one.  */
11631 
11632 rtx
11633 remove_death (unsigned int regno, rtx insn)
11634 {
11635   rtx note = find_regno_note (insn, REG_DEAD, regno);
11636 
11637   if (note)
11638     {
11639       REG_N_DEATHS (regno)--;
11640       remove_note (insn, note);
11641     }
11642 
11643   return note;
11644 }
11645 
11646 /* For each register (hardware or pseudo) used within expression X, if its
11647    death is in an instruction with cuid between FROM_CUID (inclusive) and
11648    TO_INSN (exclusive), put a REG_DEAD note for that register in the
11649    list headed by PNOTES.
11650 
11651    That said, don't move registers killed by maybe_kill_insn.
11652 
11653    This is done when X is being merged by combination into TO_INSN.  These
11654    notes will then be distributed as needed.  */
11655 
11656 static void
11657 move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
11658 	     rtx *pnotes)
11659 {
11660   const char *fmt;
11661   int len, i;
11662   enum rtx_code code = GET_CODE (x);
11663 
11664   if (code == REG)
11665     {
11666       unsigned int regno = REGNO (x);
11667       rtx where_dead = reg_stat[regno].last_death;
11668       rtx before_dead, after_dead;
11669 
11670       /* Don't move the register if it gets killed in between from and to.  */
11671       if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11672 	  && ! reg_referenced_p (x, maybe_kill_insn))
11673 	return;
11674 
11675       /* WHERE_DEAD could be a USE insn made by combine, so first we
11676 	 make sure that we have insns with valid INSN_CUID values.  */
11677       before_dead = where_dead;
11678       while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11679 	before_dead = PREV_INSN (before_dead);
11680 
11681       after_dead = where_dead;
11682       while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11683 	after_dead = NEXT_INSN (after_dead);
11684 
11685       if (before_dead && after_dead
11686 	  && INSN_CUID (before_dead) >= from_cuid
11687 	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11688 	      || (where_dead != after_dead
11689 		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11690 	{
11691 	  rtx note = remove_death (regno, where_dead);
11692 
11693 	  /* It is possible for the call above to return 0.  This can occur
11694 	     when last_death points to I2 or I1 that we combined with.
11695 	     In that case make a new note.
11696 
11697 	     We must also check for the case where X is a hard register
11698 	     and NOTE is a death note for a range of hard registers
11699 	     including X.  In that case, we must put REG_DEAD notes for
11700 	     the remaining registers in place of NOTE.  */
11701 
11702 	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11703 	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11704 		  > GET_MODE_SIZE (GET_MODE (x))))
11705 	    {
11706 	      unsigned int deadregno = REGNO (XEXP (note, 0));
11707 	      unsigned int deadend
11708 		= (deadregno + hard_regno_nregs[deadregno]
11709 					       [GET_MODE (XEXP (note, 0))]);
11710 	      unsigned int ourend
11711 		= regno + hard_regno_nregs[regno][GET_MODE (x)];
11712 	      unsigned int i;
11713 
11714 	      for (i = deadregno; i < deadend; i++)
11715 		if (i < regno || i >= ourend)
11716 		  REG_NOTES (where_dead)
11717 		    = gen_rtx_EXPR_LIST (REG_DEAD,
11718 					 regno_reg_rtx[i],
11719 					 REG_NOTES (where_dead));
11720 	    }
11721 
11722 	  /* If we didn't find any note, or if we found a REG_DEAD note that
11723 	     covers only part of the given reg, and we have a multi-reg hard
11724 	     register, then to be safe we must check for REG_DEAD notes
11725 	     for each register other than the first.  They could have
11726 	     their own REG_DEAD notes lying around.  */
11727 	  else if ((note == 0
11728 		    || (note != 0
11729 			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11730 			    < GET_MODE_SIZE (GET_MODE (x)))))
11731 		   && regno < FIRST_PSEUDO_REGISTER
11732 		   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
11733 	    {
11734 	      unsigned int ourend
11735 		= regno + hard_regno_nregs[regno][GET_MODE (x)];
11736 	      unsigned int i, offset;
11737 	      rtx oldnotes = 0;
11738 
11739 	      if (note)
11740 		offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
11741 	      else
11742 		offset = 1;
11743 
11744 	      for (i = regno + offset; i < ourend; i++)
11745 		move_deaths (regno_reg_rtx[i],
11746 			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
11747 	    }
11748 
11749 	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11750 	    {
11751 	      XEXP (note, 1) = *pnotes;
11752 	      *pnotes = note;
11753 	    }
11754 	  else
11755 	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11756 
11757 	  REG_N_DEATHS (regno)++;
11758 	}
11759 
11760       return;
11761     }
11762 
11763   else if (GET_CODE (x) == SET)
11764     {
11765       rtx dest = SET_DEST (x);
11766 
11767       move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11768 
11769       /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11770 	 that accesses one word of a multi-word item, some
11771 	 piece of every register in the expression is used by
11772 	 this insn, so remove any old death.  */
11773       /* ??? So why do we test for equality of the sizes?  */
11774 
11775       if (GET_CODE (dest) == ZERO_EXTRACT
11776 	  || GET_CODE (dest) == STRICT_LOW_PART
11777 	  || (GET_CODE (dest) == SUBREG
11778 	      && (((GET_MODE_SIZE (GET_MODE (dest))
11779 		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11780 		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11781 		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11782 	{
11783 	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11784 	  return;
11785 	}
11786 
11787       /* If this is some other SUBREG, we know it replaces the entire
11788 	 value, so use that as the destination.  */
11789       if (GET_CODE (dest) == SUBREG)
11790 	dest = SUBREG_REG (dest);
11791 
11792       /* If this is a MEM, adjust deaths of anything used in the address.
11793 	 For a REG (the only other possibility), the entire value is
11794 	 being replaced so the old value is not used in this insn.  */
11795 
11796       if (MEM_P (dest))
11797 	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
11798 		     to_insn, pnotes);
11799       return;
11800     }
11801 
11802   else if (GET_CODE (x) == CLOBBER)
11803     return;
11804 
11805   len = GET_RTX_LENGTH (code);
11806   fmt = GET_RTX_FORMAT (code);
11807 
11808   for (i = 0; i < len; i++)
11809     {
11810       if (fmt[i] == 'E')
11811 	{
11812 	  int j;
11813 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11814 	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
11815 			 to_insn, pnotes);
11816 	}
11817       else if (fmt[i] == 'e')
11818 	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
11819     }
11820 }
11821 
11822 /* Return 1 if X is the target of a bit-field assignment in BODY, the
11823    pattern of an insn.  X must be a REG.  */
11824 
11825 static int
11826 reg_bitfield_target_p (rtx x, rtx body)
11827 {
11828   int i;
11829 
11830   if (GET_CODE (body) == SET)
11831     {
11832       rtx dest = SET_DEST (body);
11833       rtx target;
11834       unsigned int regno, tregno, endregno, endtregno;
11835 
11836       if (GET_CODE (dest) == ZERO_EXTRACT)
11837 	target = XEXP (dest, 0);
11838       else if (GET_CODE (dest) == STRICT_LOW_PART)
11839 	target = SUBREG_REG (XEXP (dest, 0));
11840       else
11841 	return 0;
11842 
11843       if (GET_CODE (target) == SUBREG)
11844 	target = SUBREG_REG (target);
11845 
11846       if (!REG_P (target))
11847 	return 0;
11848 
11849       tregno = REGNO (target), regno = REGNO (x);
11850       if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11851 	return target == x;
11852 
11853       endtregno = tregno + hard_regno_nregs[tregno][GET_MODE (target)];
11854       endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
11855 
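      /* The two hard register ranges overlap iff neither one ends before
	 the other begins.  */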
11856       return endregno > tregno && regno < endtregno;
11857     }
11858 
11859   else if (GET_CODE (body) == PARALLEL)
11860     for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
11861       if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
11862 	return 1;
11863 
11864   return 0;
11865 }
11866 
11867 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11868    as appropriate.  I3 and I2 are the insns resulting from the combination
11869    insns including FROM (I2 may be zero).
11870 
11871    ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11872    not need REG_DEAD notes because they are being substituted for.  This
11873    saves searching in the most common cases.
11874 
11875    Each note in the list is either ignored or placed on some insns, depending
11876    on the type of note.  */
11877 
11878 static void
11879 distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
11880 		  rtx elim_i1)
11881 {
11882   rtx note, next_note;
11883   rtx tem;
11884 
11885   for (note = notes; note; note = next_note)
11886     {
11887       rtx place = 0, place2 = 0;
11888 
11889       next_note = XEXP (note, 1);
11890       switch (REG_NOTE_KIND (note))
11891 	{
11892 	case REG_BR_PROB:
11893 	case REG_BR_PRED:
11894 	  /* Doesn't matter much where we put this, as long as it's somewhere.
11895 	     It is preferable to keep these notes on branches, which is most
11896 	     likely to be i3.  */
11897 	  place = i3;
11898 	  break;
11899 
11900 	case REG_VALUE_PROFILE:
11901 	  /* Just get rid of this note, as it is unused later anyway.  */
11902 	  break;
11903 
11904 	case REG_NON_LOCAL_GOTO:
11905 	  if (JUMP_P (i3))
11906 	    place = i3;
11907 	  else
11908 	    {
11909 	      gcc_assert (i2 && JUMP_P (i2));
11910 	      place = i2;
11911 	    }
11912 	  break;
11913 
11914 	case REG_EH_REGION:
11915 	  /* These notes must remain with the call or trapping instruction.  */
11916 	  if (CALL_P (i3))
11917 	    place = i3;
11918 	  else if (i2 && CALL_P (i2))
11919 	    place = i2;
11920 	  else
11921 	    {
11922 	      gcc_assert (flag_non_call_exceptions);
11923 	      if (may_trap_p (i3))
11924 		place = i3;
11925 	      else if (i2 && may_trap_p (i2))
11926 		place = i2;
11927 	      /* ??? Otherwise assume we've combined things such that we
11928 		 can now prove that the instructions can't trap.  Drop the
11929 		 note in this case.  */
11930 	    }
11931 	  break;
11932 
11933 	case REG_NORETURN:
11934 	case REG_SETJMP:
11935 	  /* These notes must remain with the call.  It should not be
11936 	     possible for both I2 and I3 to be a call.  */
11937 	  if (CALL_P (i3))
11938 	    place = i3;
11939 	  else
11940 	    {
11941 	      gcc_assert (i2 && CALL_P (i2));
11942 	      place = i2;
11943 	    }
11944 	  break;
11945 
11946 	case REG_UNUSED:
11947 	  /* Any clobbers for i3 may still exist, and so we must process
11948 	     REG_UNUSED notes from that insn.
11949 
11950 	     Any clobbers from i2 or i1 can only exist if they were added by
11951 	     recog_for_combine.  In that case, recog_for_combine created the
11952 	     necessary REG_UNUSED notes.  Trying to keep any original
11953 	     REG_UNUSED notes from these insns can cause incorrect output
11954 	     if it is for the same register as the original i3 dest.
11955 	     In that case, we will notice that the register is set in i3,
11956 	     and then add a REG_UNUSED note for the destination of i3, which
11957 	     is wrong.  However, it is possible to have REG_UNUSED notes from
11958 	     i2 or i1 for registers which were both used and clobbered, so
11959 	     we keep notes from i2 or i1 if they will turn into REG_DEAD
11960 	     notes.  */
11961 
11962 	  /* If this register is set or clobbered in I3, put the note there
11963 	     unless there is one already.  */
11964 	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
11965 	    {
11966 	      if (from_insn != i3)
11967 		break;
11968 
11969 	      if (! (REG_P (XEXP (note, 0))
11970 		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
11971 		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
11972 		place = i3;
11973 	    }
11974 	  /* Otherwise, if this register is used by I3, then this register
11975 	     now dies here, so we must put a REG_DEAD note here unless there
11976 	     is one already.  */
11977 	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
11978 		   && ! (REG_P (XEXP (note, 0))
11979 			 ? find_regno_note (i3, REG_DEAD,
11980 					    REGNO (XEXP (note, 0)))
11981 			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
11982 	    {
11983 	      PUT_REG_NOTE_KIND (note, REG_DEAD);
11984 	      place = i3;
11985 	    }
11986 	  break;
11987 
11988 	case REG_EQUAL:
11989 	case REG_EQUIV:
11990 	case REG_NOALIAS:
11991 	  /* These notes say something about results of an insn.  We can
11992 	     only support them if they used to be on I3 in which case they
11993 	     remain on I3.  Otherwise they are ignored.
11994 
11995 	     If the note refers to an expression that is not a constant, we
11996 	     must also ignore the note since we cannot tell whether the
11997 	     equivalence is still true.  It might be possible to do
11998 	     slightly better than this (we only have a problem if I2DEST
11999 	     or I1DEST is present in the expression), but it doesn't
12000 	     seem worth the trouble.  */
12001 
12002 	  if (from_insn == i3
12003 	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12004 	    place = i3;
12005 	  break;
12006 
12007 	case REG_INC:
12008 	case REG_NO_CONFLICT:
12009 	  /* These notes say something about how a register is used.  They must
12010 	     be present on any use of the register in I2 or I3.  */
12011 	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12012 	    place = i3;
12013 
12014 	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12015 	    {
12016 	      if (place)
12017 		place2 = i2;
12018 	      else
12019 		place = i2;
12020 	    }
12021 	  break;
12022 
12023 	case REG_LABEL:
12024 	  /* This can show up in several ways -- either directly in the
12025 	     pattern, or hidden off in the constant pool with (or without?)
12026 	     a REG_EQUAL note.  */
12027 	  /* ??? Ignore the without-reg_equal-note problem for now.  */
12028 	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12029 	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12030 		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12031 		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12032 	    place = i3;
12033 
12034 	  if (i2
12035 	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12036 		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12037 		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12038 		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12039 	    {
12040 	      if (place)
12041 		place2 = i2;
12042 	      else
12043 		place = i2;
12044 	    }
12045 
12046 	  /* Don't attach a REG_LABEL note to a JUMP_INSN.  Add
12047 	     a JUMP_LABEL instead or decrement LABEL_NUSES.  */
12048 	  if (place && JUMP_P (place))
12049 	    {
12050 	      rtx label = JUMP_LABEL (place);
12051 
12052 	      if (!label)
12053 		JUMP_LABEL (place) = XEXP (note, 0);
12054 	      else
12055 		{
12056 		  gcc_assert (label == XEXP (note, 0));
12057 		  if (LABEL_P (label))
12058 		    LABEL_NUSES (label)--;
12059 		}
12060 	      place = 0;
12061 	    }
12062 	  if (place2 && JUMP_P (place2))
12063 	    {
12064 	      rtx label = JUMP_LABEL (place2);
12065 
12066 	      if (!label)
12067 		JUMP_LABEL (place2) = XEXP (note, 0);
12068 	      else
12069 		{
12070 		  gcc_assert (label == XEXP (note, 0));
12071 		  if (LABEL_P (label))
12072 		    LABEL_NUSES (label)--;
12073 		}
12074 	      place2 = 0;
12075 	    }
12076 	  break;
12077 
12078 	case REG_NONNEG:
12079 	  /* This note says something about the value of a register prior
12080 	     to the execution of an insn.  It is too much trouble to see
12081 	     if the note is still correct in all situations.  It is better
12082 	     to simply delete it.  */
12083 	  break;
12084 
12085 	case REG_RETVAL:
12086 	  /* If the insn previously containing this note still exists,
12087 	     put it back where it was.  Otherwise move it to the previous
12088 	     insn.  Adjust the corresponding REG_LIBCALL note.  */
12089 	  if (!NOTE_P (from_insn))
12090 	    place = from_insn;
12091 	  else
12092 	    {
12093 	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12094 	      place = prev_real_insn (from_insn);
12095 	      if (tem && place)
12096 		XEXP (tem, 0) = place;
12097 	      /* If we're deleting the last remaining instruction of a
12098 		 libcall sequence, don't add the notes.  */
12099 	      else if (XEXP (note, 0) == from_insn)
12100 		tem = place = 0;
12101 	      /* Don't add the dangling REG_RETVAL note.  */
12102 	      else if (! tem)
12103 		place = 0;
12104 	    }
12105 	  break;
12106 
12107 	case REG_LIBCALL:
12108 	  /* This is handled similarly to REG_RETVAL.  */
12109 	  if (!NOTE_P (from_insn))
12110 	    place = from_insn;
12111 	  else
12112 	    {
12113 	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12114 	      place = next_real_insn (from_insn);
12115 	      if (tem && place)
12116 		XEXP (tem, 0) = place;
12117 	      /* If we're deleting the last remaining instruction of a
12118 		 libcall sequence, don't add the notes.  */
12119 	      else if (XEXP (note, 0) == from_insn)
12120 		tem = place = 0;
12121 	      /* Don't add the dangling REG_LIBCALL note.  */
12122 	      else if (! tem)
12123 		place = 0;
12124 	    }
12125 	  break;
12126 
12127 	case REG_DEAD:
12128 	  /* If we replaced the right hand side of FROM_INSN with a
12129 	     REG_EQUAL note, the original use of the dying register
12130 	     will not have been combined into I3 and I2.  In such cases,
12131 	     FROM_INSN is guaranteed to be the first of the combined
12132 	     instructions, so we simply need to search back before
12133 	     FROM_INSN for the previous use or set of this register,
12134 	     then alter the notes there appropriately.
12135 
12136 	     If the register is used as an input in I3, it dies there.
12137 	     Similarly for I2, if it is nonzero and adjacent to I3.
12138 
12139 	     If the register is not used as an input in either I3 or I2
12140 	     and it is not one of the registers we were supposed to eliminate,
12141 	     there are two possibilities.  We might have a non-adjacent I2
12142 	     or we might have somehow eliminated an additional register
12143 	     from a computation.  For example, we might have had A & B where
12144 	     we discover that B will always be zero.  In this case we will
12145 	     eliminate the reference to A.
12146 
12147 	     In both cases, we must search to see if we can find a previous
12148 	     use of A and put the death note there.  */
12149 
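	  /* Decide where the backward search below will begin if no place
	     is found directly: at FROM_INSN in the i2mod case described
	     above, otherwise at I3.  */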
12150 	  if (from_insn
12151 	      && from_insn == i2mod
12152 	      && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
12153 	    tem = from_insn;
12154 	  else
12155 	    {
12156 	      if (from_insn
12157 		  && CALL_P (from_insn)
12158 		  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12159 		place = from_insn;
12160 	      else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12161 		place = i3;
12162 	      else if (i2 != 0 && next_nonnote_insn (i2) == i3
12163 		       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12164 		place = i2;
12165 	      else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
12166 			&& !(i2mod
12167 			     && reg_overlap_mentioned_p (XEXP (note, 0),
12168 							 i2mod_old_rhs)))
12169 		       || rtx_equal_p (XEXP (note, 0), elim_i1))
12170 		break;
12171 	      tem = i3;
12172 	    }
12173 
12174 	  if (place == 0)
12175 	    {
12176 	      basic_block bb = this_basic_block;
12177 
12178 	      for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
12179 		{
12180 		  if (! INSN_P (tem))
12181 		    {
12182 		      if (tem == BB_HEAD (bb))
12183 			break;
12184 		      continue;
12185 		    }
12186 
12187 		  /* If the register is being set at TEM, see if that is all
12188 		     TEM is doing.  If so, delete TEM.  Otherwise, make this
12189 		     into a REG_UNUSED note instead. Don't delete sets to
12190 		     global register vars.  */
12191 		  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
12192 		       || !global_regs[REGNO (XEXP (note, 0))])
12193 		      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
12194 		    {
12195 		      rtx set = single_set (tem);
12196 		      rtx inner_dest = 0;
12197 #ifdef HAVE_cc0
12198 		      rtx cc0_setter = NULL_RTX;
12199 #endif
12200 
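		      /* Walk down through STRICT_LOW_PART, SUBREG and
			 ZERO_EXTRACT wrappers to find the innermost
			 destination of the set.  */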
12201 		      if (set != 0)
12202 			for (inner_dest = SET_DEST (set);
12203 			     (GET_CODE (inner_dest) == STRICT_LOW_PART
12204 			      || GET_CODE (inner_dest) == SUBREG
12205 			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
12206 			     inner_dest = XEXP (inner_dest, 0))
12207 			  ;
12208 
12209 		      /* Verify that it was the set, and not a clobber that
12210 			 modified the register.
12211 
12212 			 CC0 targets must be careful to maintain setter/user
12213 			 pairs.  If we cannot delete the setter due to side
12214 			 effects, mark the user with an UNUSED note instead
12215 			 of deleting it.  */
12216 
12217 		      if (set != 0 && ! side_effects_p (SET_SRC (set))
12218 			  && rtx_equal_p (XEXP (note, 0), inner_dest)
12219 #ifdef HAVE_cc0
12220 			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12221 			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12222 				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12223 #endif
12224 			  )
12225 			{
12226 			  /* Move the notes and links of TEM elsewhere.
12227 			     This might delete other dead insns recursively.
12228 			     First set the pattern to something that won't use
12229 			     any register.  */
12230 			  rtx old_notes = REG_NOTES (tem);
12231 
12232 			  PATTERN (tem) = pc_rtx;
12233 			  REG_NOTES (tem) = NULL;
12234 
12235 			  distribute_notes (old_notes, tem, tem, NULL_RTX,
12236 					    NULL_RTX, NULL_RTX);
12237 			  distribute_links (LOG_LINKS (tem));
12238 
12239 			  SET_INSN_DELETED (tem);
12240 
12241 #ifdef HAVE_cc0
12242 			  /* Delete the setter too.  */
12243 			  if (cc0_setter)
12244 			    {
12245 			      PATTERN (cc0_setter) = pc_rtx;
12246 			      old_notes = REG_NOTES (cc0_setter);
12247 			      REG_NOTES (cc0_setter) = NULL;
12248 
12249 			      distribute_notes (old_notes, cc0_setter,
12250 						cc0_setter, NULL_RTX,
12251 						NULL_RTX, NULL_RTX);
12252 			      distribute_links (LOG_LINKS (cc0_setter));
12253 
12254 			      SET_INSN_DELETED (cc0_setter);
12255 			    }
12256 #endif
12257 			}
12258 		      else
12259 			{
12260 			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
12261 
12262 			  /*  If there isn't already a REG_UNUSED note, put one
12263 			      here.  Do not place a REG_DEAD note, even if
12264 			      the register is also used here; that would not
12265 			      match the algorithm used in lifetime analysis
12266 			      and can cause the consistency check in the
12267 			      scheduler to fail.  */
12268 			  if (! find_regno_note (tem, REG_UNUSED,
12269 						 REGNO (XEXP (note, 0))))
12270 			    place = tem;
12271 			  break;
12272 			}
12273 		    }
12274 		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12275 			   || (CALL_P (tem)
12276 			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
12277 		    {
12278 		      place = tem;
12279 
12280 		      /* If we are doing a 3->2 combination, and we have a
12281 			 register which formerly died in i3 and was not used
12282 			 by i2, which now no longer dies in i3 and is used in
12283 			 i2 but does not die in i2, and place is between i2
12284 			 and i3, then we may need to move a link from place to
12285 			 i2.  */
12286 		      if (i2 && INSN_UID (place) <= max_uid_cuid
12287 			  && INSN_CUID (place) > INSN_CUID (i2)
12288 			  && from_insn
12289 			  && INSN_CUID (from_insn) > INSN_CUID (i2)
12290 			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12291 			{
12292 			  rtx links = LOG_LINKS (place);
12293 			  LOG_LINKS (place) = 0;
12294 			  distribute_links (links);
12295 			}
12296 		      break;
12297 		    }
12298 
12299 		  if (tem == BB_HEAD (bb))
12300 		    break;
12301 		}
12302 
12303 	      /* We haven't found an insn for the death note and it
12304 		 is still a REG_DEAD note, but we have hit the beginning
12305 		 of the block.  If the existing life info says the reg
12306 		 was dead, there's nothing left to do.  Otherwise, we'll
12307 		 need to do a global life update after combine.  */
12308 	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12309 		  && REGNO_REG_SET_P (bb->il.rtl->global_live_at_start,
12310 				      REGNO (XEXP (note, 0))))
12311 		SET_BIT (refresh_blocks, this_basic_block->index);
12312 	    }
12313 
12314 	  /* If the register is set or already dead at PLACE, we needn't do
12315 	     anything with this note if it is still a REG_DEAD note.
12316 	     We check here if it is set at all, not if it is totally replaced,
12317 	     which is what `dead_or_set_p' checks, so also check for it being
12318 	     set partially.  */
12319 
12320 	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
12321 	    {
12322 	      unsigned int regno = REGNO (XEXP (note, 0));
12323 
12324 	      /* Similarly, if the instruction on which we want to place
12325 		 the note is a noop, we'll need to do a global live update
12326 		 after we remove them in delete_noop_moves.  */
12327 	      if (noop_move_p (place))
12328 		SET_BIT (refresh_blocks, this_basic_block->index);
12329 
12330 	      if (dead_or_set_p (place, XEXP (note, 0))
12331 		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12332 		{
12333 		  /* Unless the register previously died in PLACE, clear
12334 		     last_death.  [I no longer understand why this is
12335 		     being done.] */
12336 		  if (reg_stat[regno].last_death != place)
12337 		    reg_stat[regno].last_death = 0;
12338 		  place = 0;
12339 		}
12340 	      else
12341 		reg_stat[regno].last_death = place;
12342 
12343 	      /* If this is a death note for a hard reg that is occupying
12344 		 multiple registers, ensure that we are still using all
12345 		 parts of the object.  If we find a piece of the object
12346 		 that is unused, we must arrange for an appropriate REG_DEAD
12347 		 note to be added for it.  However, we can't just emit a USE
12348 		 and tag the note to it, since the register might actually
12349 		 be dead; so we recurse, and the recursive call then finds
12350 		 the previous insn that used this register.  */
12351 
12352 	      if (place && regno < FIRST_PSEUDO_REGISTER
12353 		  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
12354 		{
12355 		  unsigned int endregno
12356 		    = regno + hard_regno_nregs[regno]
12357 					      [GET_MODE (XEXP (note, 0))];
12358 		  int all_used = 1;
12359 		  unsigned int i;
12360 
12361 		  for (i = regno; i < endregno; i++)
12362 		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
12363 			 && ! find_regno_fusage (place, USE, i))
12364 			|| dead_or_set_regno_p (place, i))
12365 		      all_used = 0;
12366 
12367 		  if (! all_used)
12368 		    {
12369 		      /* Put only REG_DEAD notes for pieces that are
12370 			 not already dead or set.  */
12371 
12372 		      for (i = regno; i < endregno;
12373 			   i += hard_regno_nregs[i][reg_raw_mode[i]])
12374 			{
12375 			  rtx piece = regno_reg_rtx[i];
12376 			  basic_block bb = this_basic_block;
12377 
12378 			  if (! dead_or_set_p (place, piece)
12379 			      && ! reg_bitfield_target_p (piece,
12380 							  PATTERN (place)))
12381 			    {
12382 			      rtx new_note
12383 				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
12384 
12385 			      distribute_notes (new_note, place, place,
12386 						NULL_RTX, NULL_RTX, NULL_RTX);
12387 			    }
12388 			  else if (! refers_to_regno_p (i, i + 1,
12389 							PATTERN (place), 0)
12390 				   && ! find_regno_fusage (place, USE, i))
12391 			    for (tem = PREV_INSN (place); ;
12392 				 tem = PREV_INSN (tem))
12393 			      {
12394 				if (! INSN_P (tem))
12395 				  {
12396 				    if (tem == BB_HEAD (bb))
12397 				      {
12398 					SET_BIT (refresh_blocks,
12399 						 this_basic_block->index);
12400 					break;
12401 				      }
12402 				    continue;
12403 				  }
12404 				if (dead_or_set_p (tem, piece)
12405 				    || reg_bitfield_target_p (piece,
12406 							      PATTERN (tem)))
12407 				  {
12408 				    REG_NOTES (tem)
12409 				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
12410 							   REG_NOTES (tem));
12411 				    break;
12412 				  }
12413 			      }
12414 
12415 			}
12416 
12417 		      place = 0;
12418 		    }
12419 		}
12420 	    }
12421 	  break;
12422 
12423 	default:
12424 	  /* Any other notes should not be present at this point in the
12425 	     compilation.  */
12426 	  gcc_unreachable ();
12427 	}
12428 
12429       if (place)
12430 	{
12431 	  XEXP (note, 1) = REG_NOTES (place);
12432 	  REG_NOTES (place) = note;
12433 	}
12434       else if ((REG_NOTE_KIND (note) == REG_DEAD
12435 		|| REG_NOTE_KIND (note) == REG_UNUSED)
12436 	       && REG_P (XEXP (note, 0)))
12437 	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
12438 
12439       if (place2)
12440 	{
12441 	  if ((REG_NOTE_KIND (note) == REG_DEAD
12442 	       || REG_NOTE_KIND (note) == REG_UNUSED)
12443 	      && REG_P (XEXP (note, 0)))
12444 	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
12445 
12446 	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
12447 					       REG_NOTE_KIND (note),
12448 					       XEXP (note, 0),
12449 					       REG_NOTES (place2));
12450 	}
12451     }
12452 }
12453 
12454 /* Similarly to above, distribute the LOG_LINKS that used to be present on
12455    I3, I2, and I1 to new locations.  This is also called to add a link
12456    pointing at I3 when I3's destination is changed.  */
12457 
12458 static void
12459 distribute_links (rtx links)
12460 {
12461   rtx link, next_link;
12462 
12463   for (link = links; link; link = next_link)
12464     {
12465       rtx place = 0;
12466       rtx insn;
12467       rtx set, reg;
12468 
12469       next_link = XEXP (link, 1);
12470 
12471       /* If the insn that this link points to is a NOTE or isn't a single
12472 	 set, ignore it.  In the latter case, it isn't clear what we
12473 	 can do other than ignore the link, since we can't tell which
12474 	 register it was for.  Such links wouldn't be used by combine
12475 	 anyway.
12476 
12477 	 It is not possible for the destination of the target of the link to
12478 	 have been changed by combine.  The only way that could happen is if
12479 	 we replaced I3, I2, and I1 by I3 and I2.  But in that case the
12480 	 destination of I2 also remains unchanged.  */
12481 
12482       if (NOTE_P (XEXP (link, 0))
12483 	  || (set = single_set (XEXP (link, 0))) == 0)
12484 	continue;
12485 
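      /* Find the register actually being set, stripping any SUBREG,
	 ZERO_EXTRACT or STRICT_LOW_PART wrappers around the destination.  */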
12486       reg = SET_DEST (set);
12487       while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
12488 	     || GET_CODE (reg) == STRICT_LOW_PART)
12489 	reg = XEXP (reg, 0);
12490 
12491       /* A LOG_LINK is defined as being placed on the first insn that uses
12492 	 a register and points to the insn that sets the register.  Start
12493 	 searching at the next insn after the target of the link and stop
12494 	 when we reach a set of the register or the end of the basic block.
12495 
12496 	 Note that this correctly handles the link that used to point from
12497 	 I3 to I2.  Also note that not much searching is typically done here
12498 	 since most links don't point very far away.  */
12499 
12500       for (insn = NEXT_INSN (XEXP (link, 0));
12501 	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
12502 		     || BB_HEAD (this_basic_block->next_bb) != insn));
12503 	   insn = NEXT_INSN (insn))
12504 	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
12505 	  {
12506 	    if (reg_referenced_p (reg, PATTERN (insn)))
12507 	      place = insn;
12508 	    break;
12509 	  }
12510 	else if (CALL_P (insn)
12511 		 && find_reg_fusage (insn, USE, reg))
12512 	  {
12513 	    place = insn;
12514 	    break;
12515 	  }
12516 	else if (INSN_P (insn) && reg_set_p (reg, insn))
12517 	  break;
12518 
12519       /* If we found a place to put the link, place it there unless there
12520 	 is already a link to the same insn as LINK at that point.  */
12521 
12522       if (place)
12523 	{
12524 	  rtx link2;
12525 
12526 	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
12527 	    if (XEXP (link2, 0) == XEXP (link, 0))
12528 	      break;
12529 
12530 	  if (link2 == 0)
12531 	    {
12532 	      XEXP (link, 1) = LOG_LINKS (place);
12533 	      LOG_LINKS (place) = link;
12534 
12535 	      /* Set added_links_insn to the earliest insn we added a
12536 		 link to.  */
12537 	      if (added_links_insn == 0
12538 		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
12539 		added_links_insn = place;
12540 	    }
12541 	}
12542     }
12543 }
12544 
12545 /* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
12546    Check whether the expression pointed to by LOC is a register or
12547    memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
12548    Otherwise return zero.  */
12549 
12550 static int
12551 unmentioned_reg_p_1 (rtx *loc, void *expr)
12552 {
12553   rtx x = *loc;
12554 
12555   if (x != NULL_RTX
12556       && (REG_P (x) || MEM_P (x))
12557       && ! reg_mentioned_p (x, (rtx) expr))
12558     return 1;
12559   return 0;
12560 }
12561 
12562 /* Check for any register or memory mentioned in EQUIV that is not
12563    mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
12564    of EXPR where some registers may have been replaced by constants.  */
12565 
12566 static bool
12567 unmentioned_reg_p (rtx equiv, rtx expr)
12568 {
12569   return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
12570 }
12571 
12572 /* Compute INSN_CUID for INSN, which is an insn made by combine.  */
12573 
12574 static int
12575 insn_cuid (rtx insn)
12576 {
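  /* Insns created by combine have UIDs above max_uid_cuid and thus no cuid
     of their own; such an insn should only be a USE, so skip forward and
     use the cuid of the next original insn.  */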
12577   while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12578 	 && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
12579     insn = NEXT_INSN (insn);
12580 
12581   gcc_assert (INSN_UID (insn) <= max_uid_cuid);
12582 
12583   return INSN_CUID (insn);
12584 }
12585 
12586 void
12587 dump_combine_stats (FILE *file)
12588 {
12589   fprintf
12590     (file,
12591      ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12592      combine_attempts, combine_merges, combine_extras, combine_successes);
12593 }
12594 
12595 void
12596 dump_combine_total_stats (FILE *file)
12597 {
12598   fprintf
12599     (file,
12600      "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12601      total_attempts, total_merges, total_extras, total_successes);
12602 }
12603 
12604 
12605 static bool
12606 gate_handle_combine (void)
12607 {
12608   return (optimize > 0);
12609 }
12610 
12611 /* Try combining insns through substitution.  */
12612 static unsigned int
12613 rest_of_handle_combine (void)
12614 {
12615   int rebuild_jump_labels_after_combine
12616     = combine_instructions (get_insns (), max_reg_num ());
12617 
12618   /* Combining insns may have turned an indirect jump into a
12619      direct jump.  Rebuild the JUMP_LABEL fields of jumping
12620      instructions.  */
12621   if (rebuild_jump_labels_after_combine)
12622     {
12623       timevar_push (TV_JUMP);
12624       rebuild_jump_labels (get_insns ());
12625       timevar_pop (TV_JUMP);
12626 
12627       delete_dead_jumptables ();
12628       cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
12629     }
12630   return 0;
12631 }
12632 
12633 struct tree_opt_pass pass_combine =
12634 {
12635   "combine",                            /* name */
12636   gate_handle_combine,                  /* gate */
12637   rest_of_handle_combine,               /* execute */
12638   NULL,                                 /* sub */
12639   NULL,                                 /* next */
12640   0,                                    /* static_pass_number */
12641   TV_COMBINE,                           /* tv_id */
12642   0,                                    /* properties_required */
12643   0,                                    /* properties_provided */
12644   0,                                    /* properties_destroyed */
12645   0,                                    /* todo_flags_start */
12646   TODO_dump_func |
12647   TODO_ggc_collect,                     /* todo_flags_finish */
12648   'c'                                   /* letter */
12649 };
12650 
12651