/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKS never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triplets of insns A, B and C when C has
   a link back to B and B has a link back to A.  Likewise for a
   small number of quadruplets of insns A, B, C and D for which
   there's a high likelihood of success.

   LOG_LINKS do not include links for uses of CC0.  They don't need
   to, because the insn that sets CC0 is always immediately before
   the insn that tests it.  So we always regard a branch insn as
   having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with modified_between_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insns' values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information isn't
   completely updated (however this is only a local issue since it is
   regenerated before the next pass that uses it):

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
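
/* As a hypothetical illustration of the substitution described above
   (register numbers and modes made up), given two insns joined by a
   LOG_LINK

     i2: (set (reg:SI 100) (ashift:SI (reg:SI 101) (const_int 2)))
     i3: (set (reg:SI 102) (plus:SI (reg:SI 100) (reg:SI 103)))

   where reg 100 dies in i3, we substitute i2's SET_SRC into i3 to get

     i3: (set (reg:SI 102)
	      (plus:SI (ashift:SI (reg:SI 101) (const_int 2))
		       (reg:SI 103)))

   If the machine description recognizes the result (say, as a
   shift-and-add instruction), i2 is deleted and i3 is replaced.  */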

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "stor-layout.h"
#include "cfgrtl.h"
#include "cfgcleanup.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "explow.h"
#include "insn-attr.h"
#include "rtlhooks-def.h"
#include "params.h"
#include "tree-pass.h"
#include "valtrack.h"
#include "rtl-iter.h"
#include "print-rtl.h"

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx_insn *i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;

struct reg_stat_type {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx_insn			*last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx_insn			*last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
	 to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value		the last value assigned
     last_set_label		records the value of label_tick when the
				register was assigned
     last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
     last_set_invalid		set to nonzero when it is not valid
				to use the value of this register in some
				register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two parameters are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx				last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int				last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int				last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT	last_set_nonzero_bits;
  char				last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode)	last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char				last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char			sign_bit_copies;

  unsigned HOST_WIDE_INT	nonzero_bits;

  /* Record the value of the label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int				truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode we might be able to save an explicit
     truncation if we know that value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode)	truncated_to_mode : 8;
};
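
/* For example (a sketch, with a made-up register number): if reg_stat[n]
   records nonzero_bits == 0xff for a register loaded in QImode on a
   machine where byte loads zero extend, then a later

     (and:SI (reg:SI n) (const_int 255))

   is redundant, because every bit the AND could clear is already known
   to be zero, so the combiner can replace the AND with reg n itself.  */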


static vec<reg_stat_type> reg_stat;

/* One plus the highest pseudo for which we track REG_N_SETS.
   regstat_init_n_sets_and_refs allocates the array for REG_N_SETS just once,
   but during combine_split_insns new pseudos can be created.  As we don't have
   updated DF information in that case, it is hard to initialize the array
   after growing.  The combiner only cares about REG_N_SETS (regno) == 1,
   so instead of growing the arrays, just assume all newly created pseudos
   during combine might be set multiple times.  */

static unsigned int reg_n_sets_max;

/* Record the luid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the luid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_luid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx_insn *subst_insn;

/* This is the lowest LUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this LUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_luid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx_insn *added_links_insn;

/* And similarly, for notes.  */

static rtx_insn *added_notes_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;
static bool optimize_this_for_speed_p;


/* Length of the currently allocated uid_insn_cost array.  */

static int max_uid_known;

/* The following array records the insn_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* The following array records the LOG_LINKS for every insn in the
   instruction stream as struct insn_link pointers.  */

struct insn_link {
  rtx_insn *insn;
  unsigned int regno;
  struct insn_link *next;
};

static struct insn_link **uid_log_links;

static inline int
insn_uid_check (const_rtx insn)
{
  int uid = INSN_UID (insn);
  gcc_checking_assert (uid <= max_uid_known);
  return uid;
}

#define INSN_COST(INSN)		(uid_insn_cost[insn_uid_check (INSN)])
#define LOG_LINKS(INSN)		(uid_log_links[insn_uid_check (INSN)])

#define FOR_EACH_LOG_LINK(L, INSN)				\
  for ((L) = LOG_LINKS (INSN); (L); (L) = (L)->next)
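
/* A usage sketch of the iterator above (the names are placeholders): to
   ask whether DEF_INSN is recorded as the most recent setter of DEST's
   register for USE_INSN, write

     struct insn_link *link;
     FOR_EACH_LOG_LINK (link, use_insn)
       if (link->insn == def_insn && link->regno == REGNO (dest))
	 break;

   and test whether LINK is nonnull afterwards; find_single_use below
   performs exactly this kind of walk.  */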

/* Links for LOG_LINKS are allocated from this obstack.  */

static struct obstack insn_link_obstack;

/* Allocate a link.  */

static inline struct insn_link *
alloc_insn_link (rtx_insn *insn, unsigned int regno, struct insn_link *next)
{
  struct insn_link *l
    = (struct insn_link *) obstack_alloc (&insn_link_obstack,
					  sizeof (struct insn_link));
  l->insn = insn;
  l->regno = regno;
  l->next = next;
  return l;
}

/* Incremented for each basic block.  */

static int label_tick;

/* Reset to label_tick for each extended basic block in scanning order.  */

static int label_tick_ebb_start;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static scalar_int_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  The former test prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;
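
/* For instance, with j <<= 1 executed in a loop, consulting the bits
   recorded for j from a previous iteration while recomputing them would
   propagate facts about the pre-shift value; keeping the flag above zero
   during the computation avoids that.  */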


/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE, UNDO_LINKS };

struct undo
{
  struct undo *next;
  enum undo_kind kind;
  union { rtx r; int i; machine_mode m; struct insn_link *l; } old_contents;
  union { rtx *r; int *i; struct insn_link **l; } where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx_insn *other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static rtx reg_nonzero_bits_for_combine (const_rtx, scalar_int_mode,
					 scalar_int_mode,
					 unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (const_rtx, scalar_int_mode,
						scalar_int_mode,
						unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (rtx_insn *);
static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
static int cant_combine_insn_p (rtx_insn *);
static int can_combine_p (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
			  rtx_insn *, rtx_insn *, rtx *, rtx *);
static int combinable_i3pat (rtx_insn *, rtx *, rtx, rtx, rtx, int, int, rtx *);
static int contains_muldiv (rtx);
static rtx_insn *try_combine (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
			      int *, rtx_insn *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx_insn *, bool);
static rtx subst (rtx, rtx, rtx, int, int, int);
static rtx combine_simplify_rtx (rtx, machine_mode, int, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
static rtx make_extraction (machine_mode, rtx, HOST_WIDE_INT,
			    rtx, unsigned HOST_WIDE_INT, int, int, int);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
			      unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_int_to_mode (rtx, scalar_int_mode, scalar_int_mode,
			      scalar_int_mode, unsigned HOST_WIDE_INT, int);
static rtx force_to_mode (rtx, machine_mode,
			  unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx, bool = false);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (scalar_int_mode, rtx,
				     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, scalar_int_mode, rtx,
				   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
			    HOST_WIDE_INT, machine_mode, int *);
static rtx simplify_shift_const_1 (enum rtx_code, machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, machine_mode, rtx,
				 int);
static int recog_for_combine (rtx *, rtx_insn *, rtx *);
static rtx gen_lowpart_for_combine (machine_mode, rtx);
static enum rtx_code simplify_compare_const (enum rtx_code, machine_mode,
					     rtx, rtx *);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx_insn *, rtx);
static void check_promoted_subreg (rtx_insn *, rtx);
static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
static void record_dead_and_set_regs (rtx_insn *);
static int get_last_value_validate (rtx *, rtx_insn *, int, int);
static rtx get_last_value (const_rtx);
static void reg_dead_at_p_1 (rtx, const_rtx, void *);
static int reg_dead_at_p (rtx, rtx_insn *);
static void move_deaths (rtx, rtx, int, rtx_insn *, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx_insn *, rtx_insn *, rtx_insn *, rtx, rtx, rtx);
static void distribute_links (struct insn_link *);
static void mark_used_regs_combine (rtx);
static void record_promoted_value (rtx_insn *, rtx);
static bool unmentioned_reg_p (rtx, rtx);
static void record_truncated_values (rtx *, void *);
static bool reg_truncated_to_mode (machine_mode, const_rtx);
static rtx gen_lowpart_or_truncate (machine_mode, rtx);


/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;


/* Convenience wrapper for the canonicalize_comparison target hook.
   Target hooks cannot use enum rtx_code.  */
static inline void
target_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1,
				bool op0_preserve_value)
{
  int code_int = (int) *code;
  targetm.canonicalize_comparison (&code_int, op0, op1, op0_preserve_value);
  *code = (enum rtx_code) code_int;
}

/* Try to split PATTERN found in INSN.  This returns NULL_RTX if
   PATTERN cannot be split.  Otherwise, it returns an insn sequence.
   This is a wrapper around split_insns which ensures that the
   reg_stat vector is made larger if the splitter creates a new
   register.  */

static rtx_insn *
combine_split_insns (rtx pattern, rtx_insn *insn)
{
  rtx_insn *ret;
  unsigned int nregs;

  ret = split_insns (pattern, insn);
  nregs = max_reg_num ();
  if (nregs > reg_stat.length ())
    reg_stat.safe_grow_cleared (nregs);
  return ret;
}

/* This is used by find_single_use to locate an rtx in LOC that
   contains exactly one use of DEST, which is typically either a REG
   or CC0.  It returns a pointer to the innermost rtx expression
   containing DEST.  Appearances of DEST that are being used to
   totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = NULL;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    CASE_CONST_ANY:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need only check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && !REG_P (SET_DEST (x))
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& REG_P (SUBREG_REG (SET_DEST (x)))
		&& !read_modify_subreg_p (SET_DEST (x))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (REG_P (dest) && REG_P (XEXP (x, i))
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == NULL)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return NULL;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (REG_P (dest)
		      && REG_P (XVECEXP (x, i, j))
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == NULL)
		result = this_result;
	      else if (this_result)
		return NULL;
	    }
	}
    }

  return result;
}


/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

static rtx *
find_single_use (rtx dest, rtx_insn *insn, rtx_insn **ploc)
{
  basic_block bb;
  rtx_insn *next;
  rtx *result;
  struct insn_link *link;

  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }

  if (!REG_P (dest))
    return 0;

  bb = BLOCK_FOR_INSN (insn);
  for (next = NEXT_INSN (insn);
       next && BLOCK_FOR_INSN (next) == bb;
       next = NEXT_INSN (next))
    if (NONDEBUG_INSN_P (next) && dead_or_set_p (next, dest))
      {
	FOR_EACH_LOG_LINK (link, next)
	  if (link->insn == insn && link->regno == REGNO (dest))
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && CONST_INT_P (newval))
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
		  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
		    && CONST_INT_P (SUBREG_REG (oldval))));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
		    && CONST_INT_P (XEXP (oldval, 0))));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_RTX;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST (&(INTO), (NEWVAL))
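
/* A usage sketch (with hypothetical operands): to replace the source of
   a SET while keeping the change undoable, write

     SUBST (SET_SRC (pat), gen_rtx_PLUS (mode, op0, op1));

   The old contents are pushed onto undobuf.undos, so undo_all can
   restore them if the combination is abandoned, while undo_commit
   recycles the records once it succeeds.  */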

/* Similar to SUBST, but NEWVAL is an int expression.  Note that
   substituting for a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_INT;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT (&(INTO), (NEWVAL))

/* Similar to SUBST, but just substitute the mode.  This is used when
   changing the mode of a pseudo-register, so that any other
   references to the entry in the regno_reg_rtx array will change as
   well.  */

static void
do_SUBST_MODE (rtx *into, machine_mode newval)
{
  struct undo *buf;
  machine_mode oldval = GET_MODE (*into);

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_MODE;
  buf->where.r = into;
  buf->old_contents.m = oldval;
  adjust_reg_mode (*into, newval);

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE (&(INTO), (NEWVAL))

/* Similar to SUBST, but NEWVAL is a LOG_LINKS expression.  */

static void
do_SUBST_LINK (struct insn_link **into, struct insn_link *newval)
{
  struct undo *buf;
  struct insn_link *oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_LINKS;
  buf->where.l = into;
  buf->old_contents.l = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_LINK(oldval, newval) do_SUBST_LINK (&oldval, newval)

/* Subroutine of try_combine.  Determine whether the replacement patterns
   NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to insn_cost
   than the original sequence I0, I1, I2, I3 and undobuf.other_insn.  Note
   that I0, I1 and/or NEWI2PAT may be NULL_RTX.  Similarly, NEWOTHERPAT and
   undobuf.other_insn may also both be NULL_RTX.  Return false if the cost
   of all the instructions can be estimated and the replacements are more
   expensive than the original sequence.  */

static bool
combine_validate_cost (rtx_insn *i0, rtx_insn *i1, rtx_insn *i2, rtx_insn *i3,
		       rtx newpat, rtx newi2pat, rtx newotherpat)
{
  int i0_cost, i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Look up the original insn_costs.  */
  i2_cost = INSN_COST (i2);
  i3_cost = INSN_COST (i3);

  if (i1)
    {
      i1_cost = INSN_COST (i1);
      if (i0)
	{
	  i0_cost = INSN_COST (i0);
	  old_cost = (i0_cost > 0 && i1_cost > 0 && i2_cost > 0 && i3_cost > 0
		      ? i0_cost + i1_cost + i2_cost + i3_cost : 0);
	}
      else
	{
	  old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0
		      ? i1_cost + i2_cost + i3_cost : 0);
	  i0_cost = 0;
	}
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = i0_cost = 0;
    }

  /* If we have split a PARALLEL I2 into I1,I2, we have counted its cost
     twice; correct that.  */
  if (old_cost && i1 && INSN_UID (i1) == INSN_UID (i2))
    old_cost -= i1_cost;


  /* Calculate the replacement insn_costs.  */
  rtx tmp = PATTERN (i3);
  PATTERN (i3) = newpat;
  int tmpi = INSN_CODE (i3);
  INSN_CODE (i3) = -1;
  new_i3_cost = insn_cost (i3, optimize_this_for_speed_p);
  PATTERN (i3) = tmp;
  INSN_CODE (i3) = tmpi;
  if (newi2pat)
    {
      tmp = PATTERN (i2);
      PATTERN (i2) = newi2pat;
      tmpi = INSN_CODE (i2);
      INSN_CODE (i2) = -1;
      new_i2_cost = insn_cost (i2, optimize_this_for_speed_p);
      PATTERN (i2) = tmp;
      INSN_CODE (i2) = tmpi;
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
		 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = INSN_COST (undobuf.other_insn);
      tmp = PATTERN (undobuf.other_insn);
      PATTERN (undobuf.other_insn) = newotherpat;
      tmpi = INSN_CODE (undobuf.other_insn);
      INSN_CODE (undobuf.other_insn) = -1;
      new_other_cost = insn_cost (undobuf.other_insn,
				  optimize_this_for_speed_p);
      PATTERN (undobuf.other_insn) = tmp;
      INSN_CODE (undobuf.other_insn) = tmpi;
      if (old_other_cost > 0 && new_other_cost > 0)
	{
	  old_cost += old_other_cost;
	  new_cost += new_other_cost;
	}
      else
	old_cost = 0;
    }

  /* Disallow this combination if both new_cost and old_cost are greater than
     zero, and new_cost is greater than old_cost.  */
  int reject = old_cost > 0 && new_cost > old_cost;

  if (dump_file)
    {
      fprintf (dump_file, "%s combination of insns ",
	       reject ? "rejecting" : "allowing");
      if (i0)
	fprintf (dump_file, "%d, ", INSN_UID (i0));
      if (i1 && INSN_UID (i1) != INSN_UID (i2))
	fprintf (dump_file, "%d, ", INSN_UID (i1));
      fprintf (dump_file, "%d and %d\n", INSN_UID (i2), INSN_UID (i3));

      fprintf (dump_file, "original costs ");
      if (i0)
	fprintf (dump_file, "%d + ", i0_cost);
      if (i1 && INSN_UID (i1) != INSN_UID (i2))
	fprintf (dump_file, "%d + ", i1_cost);
      fprintf (dump_file, "%d + %d = %d\n", i2_cost, i3_cost, old_cost);

      if (newi2pat)
	fprintf (dump_file, "replacement costs %d + %d = %d\n",
		 new_i2_cost, new_i3_cost, new_cost);
      else
	fprintf (dump_file, "replacement cost %d\n", new_cost);
    }

  if (reject)
    return false;

  /* Update the uid_insn_cost array with the replacement costs.  */
  INSN_COST (i2) = new_i2_cost;
  INSN_COST (i3) = new_i3_cost;
  if (i1)
    {
      INSN_COST (i1) = 0;
      if (i0)
	INSN_COST (i0) = 0;
    }

  return true;
}
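
/* As a worked example (the costs are hypothetical): replacing i2 of cost
   4 and i3 of cost 4 with a single pattern of cost 12 gives old_cost = 8
   and new_cost = 12, so the combination is rejected; a replacement
   costing 8 or less would be allowed.  If any original cost is unknown
   (nonpositive), old_cost becomes zero and nothing is rejected on cost
   grounds.  */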


/* Delete any insns that copy a register to itself.
   Return true if the CFG was changed.  */

static bool
delete_noop_moves (void)
{
  rtx_insn *insn, *next;
  basic_block bb;

  bool edges_deleted = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (INSN_P (insn) && noop_move_p (insn))
	    {
	      if (dump_file)
		fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));

	      edges_deleted |= delete_insn_and_edges (insn);
	    }
	}
    }

  return edges_deleted;
}


/* Return false if we do not want to (or cannot) combine DEF.  */
static bool
can_combine_def_p (df_ref def)
{
  /* Do not consider the def if it is a pre/post modification in a MEM.  */
  if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
    return false;

  unsigned int regno = DF_REF_REGNO (def);

  /* Do not combine frame pointer adjustments.  */
  if ((regno == FRAME_POINTER_REGNUM
       && (!reload_completed || frame_pointer_needed))
      || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
      || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM && fixed_regs[regno]))
    return false;

  return true;
}

/* Return false if we do not want to (or cannot) combine USE.  */
static bool
can_combine_use_p (df_ref use)
{
  /* Do not consider the usage of the stack pointer by a function call.  */
  if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
    return false;

  return true;
}

/* Fill in the log links field for all insns.  */

static void
create_log_links (void)
{
  basic_block bb;
  rtx_insn **next_use;
  rtx_insn *insn;
  df_ref def, use;

  next_use = XCNEWVEC (rtx_insn *, max_reg_num ());

  /* Pass through each block from the end, recording the uses of each
     register and establishing log links when def is encountered.
     Note that we do not clear the next_use array in order to save time,
     so we have to test whether the use is in the same basic block as def.

     There are a few cases below when we do not consider the definition or
     usage -- these are taken from what the original flow.c did.  Don't ask
     me why it is done this way; I don't know and if it works, I don't want
     to know.  */

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          if (!NONDEBUG_INSN_P (insn))
            continue;

	  /* Log links are created only once.  */
	  gcc_assert (!LOG_LINKS (insn));

	  FOR_EACH_INSN_DEF (def, insn)
            {
              unsigned int regno = DF_REF_REGNO (def);
              rtx_insn *use_insn;

              if (!next_use[regno])
                continue;

	      if (!can_combine_def_p (def))
		continue;

	      use_insn = next_use[regno];
	      next_use[regno] = NULL;

	      if (BLOCK_FOR_INSN (use_insn) != bb)
		continue;

	      /* flow.c claimed:

		 We don't build a LOG_LINK for hard registers contained
		 in ASM_OPERANDs.  If these registers get replaced,
		 we might wind up changing the semantics of the insn,
		 even if reload can make what appear to be valid
		 assignments later.  */
	      if (regno < FIRST_PSEUDO_REGISTER
		  && asm_noperands (PATTERN (use_insn)) >= 0)
		continue;

	      /* Don't add duplicate links between instructions.  */
	      struct insn_link *links;
	      FOR_EACH_LOG_LINK (links, use_insn)
	        if (insn == links->insn && regno == links->regno)
		  break;

	      if (!links)
		LOG_LINKS (use_insn)
		  = alloc_insn_link (insn, regno, LOG_LINKS (use_insn));
            }

	  FOR_EACH_INSN_USE (use, insn)
	    if (can_combine_use_p (use))
	      next_use[DF_REF_REGNO (use)] = insn;
        }
    }

  free (next_use);
}
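
/* A small example of the backward walk above (register numbers made up,
   modes omitted): scanning

     i1: (set (reg 100) (mem ...))
     i2: (set (reg 101) (plus (reg 100) (const_int 1)))

   in reverse order first records the use of reg 100 at i2 in
   next_use[100]; when the scan then reaches the def in i1, LOG_LINKS (i2)
   gains a link {i1, 100}, identifying i1 as the most recent setter of a
   register used by i2.  */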
112638fd1498Szrj 
112738fd1498Szrj /* Walk the LOG_LINKS of insn B to see if we find a reference to A.  Return
112838fd1498Szrj    true if we found a LOG_LINK that proves that A feeds B.  This only works
112938fd1498Szrj    if there are no instructions between A and B which could have a link
113038fd1498Szrj    depending on A, since in that case we would not record a link for B.
113138fd1498Szrj    We also check the implicit dependency created by a cc0 setter/user
113238fd1498Szrj    pair.  */
113338fd1498Szrj 
113438fd1498Szrj static bool
insn_a_feeds_b(rtx_insn * a,rtx_insn * b)113538fd1498Szrj insn_a_feeds_b (rtx_insn *a, rtx_insn *b)
113638fd1498Szrj {
113738fd1498Szrj   struct insn_link *links;
113838fd1498Szrj   FOR_EACH_LOG_LINK (links, b)
113938fd1498Szrj     if (links->insn == a)
114038fd1498Szrj       return true;
114138fd1498Szrj   if (HAVE_cc0 && sets_cc0_p (a))
114238fd1498Szrj     return true;
114338fd1498Szrj   return false;
114438fd1498Szrj }
114538fd1498Szrj 
114638fd1498Szrj /* Main entry point for combiner.  F is the first insn of the function.
114738fd1498Szrj    NREGS is the first unused pseudo-reg number.
114838fd1498Szrj 
1149*58e805e6Szrj    Return nonzero if the CFG was changed (e.g. if the combiner has
1150*58e805e6Szrj    turned an indirect jump instruction into a direct jump).  */
115138fd1498Szrj static int
combine_instructions(rtx_insn * f,unsigned int nregs)115238fd1498Szrj combine_instructions (rtx_insn *f, unsigned int nregs)
115338fd1498Szrj {
115438fd1498Szrj   rtx_insn *insn, *next;
115538fd1498Szrj   rtx_insn *prev;
115638fd1498Szrj   struct insn_link *links, *nextlinks;
115738fd1498Szrj   rtx_insn *first;
115838fd1498Szrj   basic_block last_bb;
115938fd1498Szrj 
116038fd1498Szrj   int new_direct_jump_p = 0;
116138fd1498Szrj 
116238fd1498Szrj   for (first = f; first && !NONDEBUG_INSN_P (first); )
116338fd1498Szrj     first = NEXT_INSN (first);
116438fd1498Szrj   if (!first)
116538fd1498Szrj     return 0;
116638fd1498Szrj 
116738fd1498Szrj   combine_attempts = 0;
116838fd1498Szrj   combine_merges = 0;
116938fd1498Szrj   combine_extras = 0;
117038fd1498Szrj   combine_successes = 0;
117138fd1498Szrj 
117238fd1498Szrj   rtl_hooks = combine_rtl_hooks;
117338fd1498Szrj 
117438fd1498Szrj   reg_stat.safe_grow_cleared (nregs);
117538fd1498Szrj 
117638fd1498Szrj   init_recog_no_volatile ();
117738fd1498Szrj 
117838fd1498Szrj   /* Allocate array for insn info.  */
117938fd1498Szrj   max_uid_known = get_max_uid ();
118038fd1498Szrj   uid_log_links = XCNEWVEC (struct insn_link *, max_uid_known + 1);
118138fd1498Szrj   uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);
118238fd1498Szrj   gcc_obstack_init (&insn_link_obstack);
118338fd1498Szrj 
118438fd1498Szrj   nonzero_bits_mode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
118538fd1498Szrj 
118638fd1498Szrj   /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
118738fd1498Szrj      problems when, for example, we have j <<= 1 in a loop.  */
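  /* Illustrative note (not from the original source): while scanning
     "j <<= 1" inside a loop, a partially recorded nonzero_bits for j,
     derived from the shift alone, would claim the low bit of j is always
     zero, which need not hold on entry to the loop.  Keeping
     nonzero_sign_valid clear during this scan prevents consuming such
     half-built values.  */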
118838fd1498Szrj 
118938fd1498Szrj   nonzero_sign_valid = 0;
119038fd1498Szrj   label_tick = label_tick_ebb_start = 1;
119138fd1498Szrj 
119238fd1498Szrj   /* Scan all SETs and see if we can deduce anything about what
119338fd1498Szrj      bits are known to be zero for some registers and how many copies
119438fd1498Szrj      of the sign bit are known to exist for those registers.
119538fd1498Szrj 
119638fd1498Szrj      Also set any known values so that we can use it while searching
119738fd1498Szrj      for what bits are known to be set.  */
119838fd1498Szrj 
119938fd1498Szrj   setup_incoming_promotions (first);
120038fd1498Szrj   /* Allow the entry block and the first block to fall into the same EBB.
120138fd1498Szrj      Conceptually the incoming promotions are assigned to the entry block.  */
120238fd1498Szrj   last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
120338fd1498Szrj 
120438fd1498Szrj   create_log_links ();
120538fd1498Szrj   FOR_EACH_BB_FN (this_basic_block, cfun)
120638fd1498Szrj     {
120738fd1498Szrj       optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
120838fd1498Szrj       last_call_luid = 0;
120938fd1498Szrj       mem_last_set = -1;
121038fd1498Szrj 
121138fd1498Szrj       label_tick++;
121238fd1498Szrj       if (!single_pred_p (this_basic_block)
121338fd1498Szrj 	  || single_pred (this_basic_block) != last_bb)
121438fd1498Szrj 	label_tick_ebb_start = label_tick;
121538fd1498Szrj       last_bb = this_basic_block;
121638fd1498Szrj 
121738fd1498Szrj       FOR_BB_INSNS (this_basic_block, insn)
121838fd1498Szrj         if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
121938fd1498Szrj 	  {
122038fd1498Szrj             rtx links;
122138fd1498Szrj 
122238fd1498Szrj             subst_low_luid = DF_INSN_LUID (insn);
122338fd1498Szrj             subst_insn = insn;
122438fd1498Szrj 
122538fd1498Szrj 	    note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
122638fd1498Szrj 		         insn);
122738fd1498Szrj 	    record_dead_and_set_regs (insn);
122838fd1498Szrj 
122938fd1498Szrj 	    if (AUTO_INC_DEC)
123038fd1498Szrj 	      for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
123138fd1498Szrj 		if (REG_NOTE_KIND (links) == REG_INC)
123238fd1498Szrj 		  set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
123338fd1498Szrj 						    insn);
123438fd1498Szrj 
123538fd1498Szrj 	    /* Record the current insn_cost of this instruction.  */
123638fd1498Szrj 	    if (NONJUMP_INSN_P (insn))
123738fd1498Szrj 	      INSN_COST (insn) = insn_cost (insn, optimize_this_for_speed_p);
123838fd1498Szrj 	    if (dump_file)
123938fd1498Szrj 	      {
124038fd1498Szrj 		fprintf (dump_file, "insn_cost %d for ", INSN_COST (insn));
124138fd1498Szrj 		dump_insn_slim (dump_file, insn);
124238fd1498Szrj 	      }
124338fd1498Szrj 	  }
124438fd1498Szrj     }
124538fd1498Szrj 
124638fd1498Szrj   nonzero_sign_valid = 1;
124738fd1498Szrj 
124838fd1498Szrj   /* Now scan all the insns in forward order.  */
124938fd1498Szrj   label_tick = label_tick_ebb_start = 1;
125038fd1498Szrj   init_reg_last ();
125138fd1498Szrj   setup_incoming_promotions (first);
125238fd1498Szrj   last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
125338fd1498Szrj   int max_combine = PARAM_VALUE (PARAM_MAX_COMBINE_INSNS);
125438fd1498Szrj 
125538fd1498Szrj   FOR_EACH_BB_FN (this_basic_block, cfun)
125638fd1498Szrj     {
125738fd1498Szrj       rtx_insn *last_combined_insn = NULL;
125838fd1498Szrj 
125938fd1498Szrj       /* Ignore instruction combination in basic blocks that are going to
126038fd1498Szrj 	 be removed as unreachable anyway.  See PR82386.  */
126138fd1498Szrj       if (EDGE_COUNT (this_basic_block->preds) == 0)
126238fd1498Szrj 	continue;
126338fd1498Szrj 
126438fd1498Szrj       optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
126538fd1498Szrj       last_call_luid = 0;
126638fd1498Szrj       mem_last_set = -1;
126738fd1498Szrj 
126838fd1498Szrj       label_tick++;
126938fd1498Szrj       if (!single_pred_p (this_basic_block)
127038fd1498Szrj 	  || single_pred (this_basic_block) != last_bb)
127138fd1498Szrj 	label_tick_ebb_start = label_tick;
127238fd1498Szrj       last_bb = this_basic_block;
127338fd1498Szrj 
127438fd1498Szrj       rtl_profile_for_bb (this_basic_block);
127538fd1498Szrj       for (insn = BB_HEAD (this_basic_block);
127638fd1498Szrj 	   insn != NEXT_INSN (BB_END (this_basic_block));
127738fd1498Szrj 	   insn = next ? next : NEXT_INSN (insn))
127838fd1498Szrj 	{
127938fd1498Szrj 	  next = 0;
128038fd1498Szrj 	  if (!NONDEBUG_INSN_P (insn))
128138fd1498Szrj 	    continue;
128238fd1498Szrj 
128338fd1498Szrj 	  while (last_combined_insn
128438fd1498Szrj 		 && (!NONDEBUG_INSN_P (last_combined_insn)
128538fd1498Szrj 		     || last_combined_insn->deleted ()))
128638fd1498Szrj 	    last_combined_insn = PREV_INSN (last_combined_insn);
128738fd1498Szrj 	  if (last_combined_insn == NULL_RTX
128838fd1498Szrj 	      || BLOCK_FOR_INSN (last_combined_insn) != this_basic_block
128938fd1498Szrj 	      || DF_INSN_LUID (last_combined_insn) <= DF_INSN_LUID (insn))
129038fd1498Szrj 	    last_combined_insn = insn;
129138fd1498Szrj 
129238fd1498Szrj 	  /* See if we know about function return values before this
129338fd1498Szrj 	     insn based upon SUBREG flags.  */
129438fd1498Szrj 	  check_promoted_subreg (insn, PATTERN (insn));
129538fd1498Szrj 
129638fd1498Szrj 	  /* See if we can find hard regs and subregs of pseudos in
129738fd1498Szrj 	     narrower modes.  This could help turn TRUNCATEs
129838fd1498Szrj 	     into SUBREGs.  */
129938fd1498Szrj 	  note_uses (&PATTERN (insn), record_truncated_values, NULL);
130038fd1498Szrj 
130138fd1498Szrj 	  /* Try this insn with each insn it links back to.  */
130238fd1498Szrj 
130338fd1498Szrj 	  FOR_EACH_LOG_LINK (links, insn)
130438fd1498Szrj 	    if ((next = try_combine (insn, links->insn, NULL,
130538fd1498Szrj 				     NULL, &new_direct_jump_p,
130638fd1498Szrj 				     last_combined_insn)) != 0)
130738fd1498Szrj 	      {
130838fd1498Szrj 		statistics_counter_event (cfun, "two-insn combine", 1);
130938fd1498Szrj 		goto retry;
131038fd1498Szrj 	      }
131138fd1498Szrj 
131238fd1498Szrj 	  /* Try each sequence of three linked insns ending with this one.  */
131338fd1498Szrj 
131438fd1498Szrj 	  if (max_combine >= 3)
131538fd1498Szrj 	    FOR_EACH_LOG_LINK (links, insn)
131638fd1498Szrj 	      {
131738fd1498Szrj 		rtx_insn *link = links->insn;
131838fd1498Szrj 
131938fd1498Szrj 		/* If the linked insn has been replaced by a note, then there
132038fd1498Szrj 		   is no point in pursuing this chain any further.  */
132138fd1498Szrj 		if (NOTE_P (link))
132238fd1498Szrj 		  continue;
132338fd1498Szrj 
132438fd1498Szrj 		FOR_EACH_LOG_LINK (nextlinks, link)
132538fd1498Szrj 		  if ((next = try_combine (insn, link, nextlinks->insn,
132638fd1498Szrj 					   NULL, &new_direct_jump_p,
132738fd1498Szrj 					   last_combined_insn)) != 0)
132838fd1498Szrj 		    {
132938fd1498Szrj 		      statistics_counter_event (cfun, "three-insn combine", 1);
133038fd1498Szrj 		      goto retry;
133138fd1498Szrj 		    }
133238fd1498Szrj 	      }
133338fd1498Szrj 
133438fd1498Szrj 	  /* Try to combine a jump insn that uses CC0
133538fd1498Szrj 	     with a preceding insn that sets CC0, and maybe with its
133638fd1498Szrj 	     logical predecessor as well.
133738fd1498Szrj 	     This is how we make decrement-and-branch insns.
133838fd1498Szrj 	     We need this special code because data flow connections
133938fd1498Szrj 	     via CC0 do not get entered in LOG_LINKS.  */
134038fd1498Szrj 
134138fd1498Szrj 	  if (HAVE_cc0
134238fd1498Szrj 	      && JUMP_P (insn)
134338fd1498Szrj 	      && (prev = prev_nonnote_insn (insn)) != 0
134438fd1498Szrj 	      && NONJUMP_INSN_P (prev)
134538fd1498Szrj 	      && sets_cc0_p (PATTERN (prev)))
134638fd1498Szrj 	    {
134738fd1498Szrj 	      if ((next = try_combine (insn, prev, NULL, NULL,
134838fd1498Szrj 				       &new_direct_jump_p,
134938fd1498Szrj 				       last_combined_insn)) != 0)
135038fd1498Szrj 		goto retry;
135138fd1498Szrj 
135238fd1498Szrj 	      FOR_EACH_LOG_LINK (nextlinks, prev)
135338fd1498Szrj 		  if ((next = try_combine (insn, prev, nextlinks->insn,
135438fd1498Szrj 					   NULL, &new_direct_jump_p,
135538fd1498Szrj 					   last_combined_insn)) != 0)
135638fd1498Szrj 		    goto retry;
135738fd1498Szrj 	    }
135838fd1498Szrj 
135938fd1498Szrj 	  /* Do the same for an insn that explicitly references CC0.  */
136038fd1498Szrj 	  if (HAVE_cc0 && NONJUMP_INSN_P (insn)
136138fd1498Szrj 	      && (prev = prev_nonnote_insn (insn)) != 0
136238fd1498Szrj 	      && NONJUMP_INSN_P (prev)
136338fd1498Szrj 	      && sets_cc0_p (PATTERN (prev))
136438fd1498Szrj 	      && GET_CODE (PATTERN (insn)) == SET
136538fd1498Szrj 	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
136638fd1498Szrj 	    {
136738fd1498Szrj 	      if ((next = try_combine (insn, prev, NULL, NULL,
136838fd1498Szrj 				       &new_direct_jump_p,
136938fd1498Szrj 				       last_combined_insn)) != 0)
137038fd1498Szrj 		goto retry;
137138fd1498Szrj 
137238fd1498Szrj 	      FOR_EACH_LOG_LINK (nextlinks, prev)
137338fd1498Szrj 		  if ((next = try_combine (insn, prev, nextlinks->insn,
137438fd1498Szrj 					   NULL, &new_direct_jump_p,
137538fd1498Szrj 					   last_combined_insn)) != 0)
137638fd1498Szrj 		    goto retry;
137738fd1498Szrj 	    }
137838fd1498Szrj 
137938fd1498Szrj 	  /* Finally, see if any of the insns that this insn links to
138038fd1498Szrj 	     explicitly references CC0.  If so, try this insn, that insn,
138138fd1498Szrj 	     and its predecessor if it sets CC0.  */
138238fd1498Szrj 	  if (HAVE_cc0)
138338fd1498Szrj 	    {
138438fd1498Szrj 	      FOR_EACH_LOG_LINK (links, insn)
138538fd1498Szrj 		if (NONJUMP_INSN_P (links->insn)
138638fd1498Szrj 		    && GET_CODE (PATTERN (links->insn)) == SET
138738fd1498Szrj 		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (links->insn)))
138838fd1498Szrj 		    && (prev = prev_nonnote_insn (links->insn)) != 0
138938fd1498Szrj 		    && NONJUMP_INSN_P (prev)
139038fd1498Szrj 		    && sets_cc0_p (PATTERN (prev))
139138fd1498Szrj 		    && (next = try_combine (insn, links->insn,
139238fd1498Szrj 					    prev, NULL, &new_direct_jump_p,
139338fd1498Szrj 					    last_combined_insn)) != 0)
139438fd1498Szrj 		  goto retry;
139538fd1498Szrj 	    }
139638fd1498Szrj 
139738fd1498Szrj 	  /* Try combining an insn with two different insns whose results it
139838fd1498Szrj 	     uses.  */
139938fd1498Szrj 	  if (max_combine >= 3)
140038fd1498Szrj 	    FOR_EACH_LOG_LINK (links, insn)
140138fd1498Szrj 	      for (nextlinks = links->next; nextlinks;
140238fd1498Szrj 		   nextlinks = nextlinks->next)
140338fd1498Szrj 		if ((next = try_combine (insn, links->insn,
140438fd1498Szrj 					 nextlinks->insn, NULL,
140538fd1498Szrj 					 &new_direct_jump_p,
140638fd1498Szrj 					 last_combined_insn)) != 0)
140838fd1498Szrj 		  {
140938fd1498Szrj 		    statistics_counter_event (cfun, "three-insn combine", 1);
141038fd1498Szrj 		    goto retry;
141138fd1498Szrj 		  }
141238fd1498Szrj 
141338fd1498Szrj 	  /* Try four-instruction combinations.  */
141438fd1498Szrj 	  if (max_combine >= 4)
141538fd1498Szrj 	    FOR_EACH_LOG_LINK (links, insn)
141638fd1498Szrj 	      {
141738fd1498Szrj 		struct insn_link *next1;
141838fd1498Szrj 		rtx_insn *link = links->insn;
141938fd1498Szrj 
142038fd1498Szrj 		/* If the linked insn has been replaced by a note, then there
142138fd1498Szrj 		   is no point in pursuing this chain any further.  */
142238fd1498Szrj 		if (NOTE_P (link))
142338fd1498Szrj 		  continue;
142438fd1498Szrj 
142538fd1498Szrj 		FOR_EACH_LOG_LINK (next1, link)
142638fd1498Szrj 		  {
142738fd1498Szrj 		    rtx_insn *link1 = next1->insn;
142838fd1498Szrj 		    if (NOTE_P (link1))
142938fd1498Szrj 		      continue;
143038fd1498Szrj 		    /* I0 -> I1 -> I2 -> I3.  */
143138fd1498Szrj 		    FOR_EACH_LOG_LINK (nextlinks, link1)
143238fd1498Szrj 		      if ((next = try_combine (insn, link, link1,
143338fd1498Szrj 					       nextlinks->insn,
143438fd1498Szrj 					       &new_direct_jump_p,
143538fd1498Szrj 					       last_combined_insn)) != 0)
143638fd1498Szrj 			{
143738fd1498Szrj 			  statistics_counter_event (cfun, "four-insn combine", 1);
143838fd1498Szrj 			  goto retry;
143938fd1498Szrj 			}
144038fd1498Szrj 		    /* I0, I1 -> I2, I2 -> I3.  */
144138fd1498Szrj 		    for (nextlinks = next1->next; nextlinks;
144238fd1498Szrj 			 nextlinks = nextlinks->next)
144338fd1498Szrj 		      if ((next = try_combine (insn, link, link1,
144438fd1498Szrj 					       nextlinks->insn,
144538fd1498Szrj 					       &new_direct_jump_p,
144638fd1498Szrj 					       last_combined_insn)) != 0)
144738fd1498Szrj 			{
144838fd1498Szrj 			  statistics_counter_event (cfun, "four-insn combine", 1);
144938fd1498Szrj 			  goto retry;
145038fd1498Szrj 			}
145138fd1498Szrj 		  }
145238fd1498Szrj 
145338fd1498Szrj 		for (next1 = links->next; next1; next1 = next1->next)
145438fd1498Szrj 		  {
145538fd1498Szrj 		    rtx_insn *link1 = next1->insn;
145638fd1498Szrj 		    if (NOTE_P (link1))
145738fd1498Szrj 		      continue;
145838fd1498Szrj 		    /* I0 -> I2; I1, I2 -> I3.  */
145938fd1498Szrj 		    FOR_EACH_LOG_LINK (nextlinks, link)
146038fd1498Szrj 		      if ((next = try_combine (insn, link, link1,
146138fd1498Szrj 					       nextlinks->insn,
146238fd1498Szrj 					       &new_direct_jump_p,
146338fd1498Szrj 					       last_combined_insn)) != 0)
146438fd1498Szrj 			{
146538fd1498Szrj 			  statistics_counter_event (cfun, "four-insn combine", 1);
146638fd1498Szrj 			  goto retry;
146738fd1498Szrj 			}
146838fd1498Szrj 		    /* I0 -> I1; I1, I2 -> I3.  */
146938fd1498Szrj 		    FOR_EACH_LOG_LINK (nextlinks, link1)
147038fd1498Szrj 		      if ((next = try_combine (insn, link, link1,
147138fd1498Szrj 					       nextlinks->insn,
147238fd1498Szrj 					       &new_direct_jump_p,
147338fd1498Szrj 					       last_combined_insn)) != 0)
147438fd1498Szrj 			{
147538fd1498Szrj 			  statistics_counter_event (cfun, "four-insn combine", 1);
147638fd1498Szrj 			  goto retry;
147738fd1498Szrj 			}
147838fd1498Szrj 		  }
147938fd1498Szrj 	      }
148038fd1498Szrj 
148138fd1498Szrj 	  /* Try this insn with each REG_EQUAL note it links back to.  */
148238fd1498Szrj 	  FOR_EACH_LOG_LINK (links, insn)
148338fd1498Szrj 	    {
148438fd1498Szrj 	      rtx set, note;
148538fd1498Szrj 	      rtx_insn *temp = links->insn;
148638fd1498Szrj 	      if ((set = single_set (temp)) != 0
148738fd1498Szrj 		  && (note = find_reg_equal_equiv_note (temp)) != 0
148838fd1498Szrj 		  && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
148938fd1498Szrj 		  /* Avoid using a register that may already have been
149038fd1498Szrj 		     marked dead by an earlier instruction.  */
149138fd1498Szrj 		  && ! unmentioned_reg_p (note, SET_SRC (set))
149238fd1498Szrj 		  && (GET_MODE (note) == VOIDmode
149338fd1498Szrj 		      ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
149438fd1498Szrj 		      : (GET_MODE (SET_DEST (set)) == GET_MODE (note)
149538fd1498Szrj 			 && (GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
149638fd1498Szrj 			     || (GET_MODE (XEXP (SET_DEST (set), 0))
149738fd1498Szrj 				 == GET_MODE (note))))))
149838fd1498Szrj 		{
149938fd1498Szrj 		  /* Temporarily replace the set's source with the
150038fd1498Szrj 		     contents of the REG_EQUAL note.  The insn will
150138fd1498Szrj 		     be deleted or recognized by try_combine.  */
150238fd1498Szrj 		  rtx orig_src = SET_SRC (set);
150338fd1498Szrj 		  rtx orig_dest = SET_DEST (set);
150438fd1498Szrj 		  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT)
150538fd1498Szrj 		    SET_DEST (set) = XEXP (SET_DEST (set), 0);
150638fd1498Szrj 		  SET_SRC (set) = note;
150738fd1498Szrj 		  i2mod = temp;
150838fd1498Szrj 		  i2mod_old_rhs = copy_rtx (orig_src);
150938fd1498Szrj 		  i2mod_new_rhs = copy_rtx (note);
151038fd1498Szrj 		  next = try_combine (insn, i2mod, NULL, NULL,
151138fd1498Szrj 				      &new_direct_jump_p,
151238fd1498Szrj 				      last_combined_insn);
151338fd1498Szrj 		  i2mod = NULL;
151438fd1498Szrj 		  if (next)
151538fd1498Szrj 		    {
151638fd1498Szrj 		      statistics_counter_event (cfun, "insn-with-note combine", 1);
151738fd1498Szrj 		      goto retry;
151838fd1498Szrj 		    }
151938fd1498Szrj 		  SET_SRC (set) = orig_src;
152038fd1498Szrj 		  SET_DEST (set) = orig_dest;
152138fd1498Szrj 		}
152238fd1498Szrj 	    }
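	  /* A sketch of the transformation above (hypothetical pseudos,
	     for illustration only): if LINKS->insn is
		 (set (reg:SI 120) (reg:SI 121))
	     carrying a REG_EQUAL note for (const_int 42), its SET_SRC is
	     temporarily rewritten to (const_int 42) so try_combine can
	     substitute the constant into this insn; on failure the
	     original SET_SRC and SET_DEST are restored.  */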
152338fd1498Szrj 
152438fd1498Szrj 	  if (!NOTE_P (insn))
152538fd1498Szrj 	    record_dead_and_set_regs (insn);
152638fd1498Szrj 
152738fd1498Szrj retry:
152838fd1498Szrj 	  ;
152938fd1498Szrj 	}
153038fd1498Szrj     }
153138fd1498Szrj 
153238fd1498Szrj   default_rtl_profile ();
153338fd1498Szrj   clear_bb_flags ();
153438fd1498Szrj   new_direct_jump_p |= purge_all_dead_edges ();
1535*58e805e6Szrj   new_direct_jump_p |= delete_noop_moves ();
153638fd1498Szrj 
153738fd1498Szrj   /* Clean up.  */
153838fd1498Szrj   obstack_free (&insn_link_obstack, NULL);
153938fd1498Szrj   free (uid_log_links);
154038fd1498Szrj   free (uid_insn_cost);
154138fd1498Szrj   reg_stat.release ();
154238fd1498Szrj 
154338fd1498Szrj   {
154438fd1498Szrj     struct undo *undo, *next;
154538fd1498Szrj     for (undo = undobuf.frees; undo; undo = next)
154638fd1498Szrj       {
154738fd1498Szrj 	next = undo->next;
154838fd1498Szrj 	free (undo);
154938fd1498Szrj       }
155038fd1498Szrj     undobuf.frees = 0;
155138fd1498Szrj   }
155238fd1498Szrj 
155338fd1498Szrj   total_attempts += combine_attempts;
155438fd1498Szrj   total_merges += combine_merges;
155538fd1498Szrj   total_extras += combine_extras;
155638fd1498Szrj   total_successes += combine_successes;
155738fd1498Szrj 
155838fd1498Szrj   nonzero_sign_valid = 0;
155938fd1498Szrj   rtl_hooks = general_rtl_hooks;
156038fd1498Szrj 
156138fd1498Szrj   /* Make recognizer allow volatile MEMs again.  */
156238fd1498Szrj   init_recog ();
156338fd1498Szrj 
156438fd1498Szrj   return new_direct_jump_p;
156538fd1498Szrj }
156638fd1498Szrj 
156738fd1498Szrj /* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */
156838fd1498Szrj 
156938fd1498Szrj static void
157038fd1498Szrj init_reg_last (void)
157138fd1498Szrj {
157238fd1498Szrj   unsigned int i;
157338fd1498Szrj   reg_stat_type *p;
157438fd1498Szrj 
157538fd1498Szrj   FOR_EACH_VEC_ELT (reg_stat, i, p)
157638fd1498Szrj     memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
157738fd1498Szrj }
157838fd1498Szrj 
157938fd1498Szrj /* Set up any promoted values for incoming argument registers.  */
158038fd1498Szrj 
158138fd1498Szrj static void
158238fd1498Szrj setup_incoming_promotions (rtx_insn *first)
158338fd1498Szrj {
158438fd1498Szrj   tree arg;
158538fd1498Szrj   bool strictly_local = false;
158638fd1498Szrj 
158738fd1498Szrj   for (arg = DECL_ARGUMENTS (current_function_decl); arg;
158838fd1498Szrj        arg = DECL_CHAIN (arg))
158938fd1498Szrj     {
159038fd1498Szrj       rtx x, reg = DECL_INCOMING_RTL (arg);
159138fd1498Szrj       int uns1, uns3;
159238fd1498Szrj       machine_mode mode1, mode2, mode3, mode4;
159338fd1498Szrj 
159438fd1498Szrj       /* Only continue if the incoming argument is in a register.  */
159538fd1498Szrj       if (!REG_P (reg))
159638fd1498Szrj 	continue;
159738fd1498Szrj 
159838fd1498Szrj       /* Determine, if possible, whether all call sites of the current
159938fd1498Szrj          function lie within the current compilation unit.  (This does
160038fd1498Szrj 	 take into account the exporting of a function via taking its
160138fd1498Szrj 	 address, and so forth.)  */
160238fd1498Szrj       strictly_local = cgraph_node::local_info (current_function_decl)->local;
160338fd1498Szrj 
160438fd1498Szrj       /* The mode and signedness of the argument before any promotions happen
160538fd1498Szrj          (equal to the mode of the pseudo holding it at that stage).  */
160638fd1498Szrj       mode1 = TYPE_MODE (TREE_TYPE (arg));
160738fd1498Szrj       uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));
160838fd1498Szrj 
160938fd1498Szrj       /* The mode and signedness of the argument after any source language and
161038fd1498Szrj          TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
161138fd1498Szrj       mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
161238fd1498Szrj       uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));
161338fd1498Szrj 
161438fd1498Szrj       /* The mode and signedness of the argument as it is actually passed,
161538fd1498Szrj          see assign_parm_setup_reg in function.c.  */
161638fd1498Szrj       mode3 = promote_function_mode (TREE_TYPE (arg), mode1, &uns3,
161738fd1498Szrj 				     TREE_TYPE (cfun->decl), 0);
161838fd1498Szrj 
161938fd1498Szrj       /* The mode of the register in which the argument is being passed.  */
162038fd1498Szrj       mode4 = GET_MODE (reg);
162138fd1498Szrj 
162238fd1498Szrj       /* Eliminate sign extensions in the callee when:
162338fd1498Szrj 	 (a) A mode promotion has occurred;  */
162438fd1498Szrj       if (mode1 == mode3)
162538fd1498Szrj 	continue;
162638fd1498Szrj       /* (b) The mode of the register is the same as the mode of
162738fd1498Szrj 	     the argument as it is passed; */
162838fd1498Szrj       if (mode3 != mode4)
162938fd1498Szrj 	continue;
163038fd1498Szrj       /* (c) There's no language level extension;  */
163138fd1498Szrj       if (mode1 == mode2)
163238fd1498Szrj 	;
163338fd1498Szrj       /* (c.1) All callers are from the current compilation unit.  If that's
163438fd1498Szrj 	 the case we don't have to rely on an ABI, we only have to know
163538fd1498Szrj 	 what we're generating right now, and we know that we will do the
163638fd1498Szrj 	 mode1 to mode2 promotion with the given sign.  */
163738fd1498Szrj       else if (!strictly_local)
163838fd1498Szrj 	continue;
163938fd1498Szrj       /* (c.2) The combination of the two promotions is useful.  This is
164038fd1498Szrj 	 true when the signs match, or if the first promotion is unsigned.
164138fd1498Szrj 	 In the latter case, (sign_extend (zero_extend x)) is the same as
164238fd1498Szrj 	 (zero_extend (zero_extend x)), so make sure to force UNS3 true.  */
164338fd1498Szrj       else if (uns1)
164438fd1498Szrj 	uns3 = true;
164538fd1498Szrj       else if (uns3)
164638fd1498Szrj 	continue;
164738fd1498Szrj 
164838fd1498Szrj       /* Record that the value was promoted from mode1 to mode3,
164938fd1498Szrj 	 so that any sign extension at the head of the current
165038fd1498Szrj 	 function may be eliminated.  */
165138fd1498Szrj       x = gen_rtx_CLOBBER (mode1, const0_rtx);
165238fd1498Szrj       x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
165338fd1498Szrj       record_value_for_reg (reg, first, x);
165438fd1498Szrj     }
165538fd1498Szrj }
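
/* For illustration (a hypothetical 32-bit target that promotes subword
   arguments to SImode; not from the original source): a signed "short"
   parameter arriving in (reg:SI 58) would be recorded here as

	 (sign_extend:SI (clobber:HI (const_int 0)))

   i.e. "some unknown HImode value, sign extended to SImode", which lets
   the combiner delete a redundant sign extension of the parameter at the
   head of the function.  */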
165638fd1498Szrj 
165738fd1498Szrj /* If MODE has a precision lower than PREC and SRC is a non-negative constant
165838fd1498Szrj    that would appear negative in MODE, sign-extend SRC for use in nonzero_bits
165938fd1498Szrj    because some machines (maybe most) will actually do the sign-extension and
166038fd1498Szrj    this is the conservative approach.
166138fd1498Szrj 
166238fd1498Szrj    ??? For 2.5, try to tighten up the MD files in this regard instead of this
166338fd1498Szrj    kludge.  */
166438fd1498Szrj 
166538fd1498Szrj static rtx
166638fd1498Szrj sign_extend_short_imm (rtx src, machine_mode mode, unsigned int prec)
166738fd1498Szrj {
166838fd1498Szrj   scalar_int_mode int_mode;
166938fd1498Szrj   if (CONST_INT_P (src)
167038fd1498Szrj       && is_a <scalar_int_mode> (mode, &int_mode)
167138fd1498Szrj       && GET_MODE_PRECISION (int_mode) < prec
167238fd1498Szrj       && INTVAL (src) > 0
167338fd1498Szrj       && val_signbit_known_set_p (int_mode, INTVAL (src)))
167438fd1498Szrj     src = GEN_INT (INTVAL (src) | ~GET_MODE_MASK (int_mode));
167538fd1498Szrj 
167638fd1498Szrj   return src;
167738fd1498Szrj }
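
/* Worked example (illustrative only): with mode == QImode and prec == 32,
   src == (const_int 200) has its QImode sign bit set (200 == 0xc8), so it
   is rewritten as 0xc8 | ~0xff == -56, matching what a sign-extending
   machine would actually load.  */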
167838fd1498Szrj 
167938fd1498Szrj /* Update RSP for pseudo-register X from INSN's REG_EQUAL note (if one exists)
168038fd1498Szrj    and SET.  */
168138fd1498Szrj 
168238fd1498Szrj static void
168338fd1498Szrj update_rsp_from_reg_equal (reg_stat_type *rsp, rtx_insn *insn, const_rtx set,
168438fd1498Szrj 			   rtx x)
168538fd1498Szrj {
168638fd1498Szrj   rtx reg_equal_note = insn ? find_reg_equal_equiv_note (insn) : NULL_RTX;
168738fd1498Szrj   unsigned HOST_WIDE_INT bits = 0;
168838fd1498Szrj   rtx reg_equal = NULL, src = SET_SRC (set);
168938fd1498Szrj   unsigned int num = 0;
169038fd1498Szrj 
169138fd1498Szrj   if (reg_equal_note)
169238fd1498Szrj     reg_equal = XEXP (reg_equal_note, 0);
169338fd1498Szrj 
169438fd1498Szrj   if (SHORT_IMMEDIATES_SIGN_EXTEND)
169538fd1498Szrj     {
169638fd1498Szrj       src = sign_extend_short_imm (src, GET_MODE (x), BITS_PER_WORD);
169738fd1498Szrj       if (reg_equal)
169838fd1498Szrj 	reg_equal = sign_extend_short_imm (reg_equal, GET_MODE (x), BITS_PER_WORD);
169938fd1498Szrj     }
170038fd1498Szrj 
170138fd1498Szrj   /* Don't call nonzero_bits if it cannot change anything.  */
170238fd1498Szrj   if (rsp->nonzero_bits != HOST_WIDE_INT_M1U)
170338fd1498Szrj     {
170438fd1498Szrj       bits = nonzero_bits (src, nonzero_bits_mode);
170538fd1498Szrj       if (reg_equal && bits)
170638fd1498Szrj 	bits &= nonzero_bits (reg_equal, nonzero_bits_mode);
170738fd1498Szrj       rsp->nonzero_bits |= bits;
170838fd1498Szrj     }
170938fd1498Szrj 
171038fd1498Szrj   /* Don't call num_sign_bit_copies if it cannot change anything.  */
171138fd1498Szrj   if (rsp->sign_bit_copies != 1)
171238fd1498Szrj     {
171338fd1498Szrj       num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
171438fd1498Szrj       if (reg_equal && maybe_ne (num, GET_MODE_PRECISION (GET_MODE (x))))
171538fd1498Szrj 	{
171638fd1498Szrj 	  unsigned int numeq = num_sign_bit_copies (reg_equal, GET_MODE (x));
171738fd1498Szrj 	  if (num == 0 || numeq > num)
171838fd1498Szrj 	    num = numeq;
171938fd1498Szrj 	}
172038fd1498Szrj       if (rsp->sign_bit_copies == 0 || num < rsp->sign_bit_copies)
172138fd1498Szrj 	rsp->sign_bit_copies = num;
172238fd1498Szrj     }
172338fd1498Szrj }
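
/* Worked example (hypothetical values, for illustration only): if SET_SRC
   yields nonzero_bits == 0xff but a REG_EQUAL note proves the value equals
   (const_int 10), the note's nonzero_bits == 0xa, so only 0xa is merged
   into rsp->nonzero_bits.  For num_sign_bit_copies the larger of the two
   estimates is taken before being merged into rsp->sign_bit_copies.  */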
172438fd1498Szrj 
172538fd1498Szrj /* Called via note_stores.  If X is a pseudo that is narrower than
172638fd1498Szrj    HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
172738fd1498Szrj 
172838fd1498Szrj    If we are setting only a portion of X and we can't figure out what
172938fd1498Szrj    portion, assume all bits will be used since we don't know what will
173038fd1498Szrj    be happening.
173138fd1498Szrj 
173238fd1498Szrj    Similarly, set how many bits of X are known to be copies of the sign bit
173338fd1498Szrj    at all locations in the function.  This is the smallest number implied
173438fd1498Szrj    by any set of X.  */
173538fd1498Szrj 
173638fd1498Szrj static void
173738fd1498Szrj set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
173838fd1498Szrj {
173938fd1498Szrj   rtx_insn *insn = (rtx_insn *) data;
174038fd1498Szrj   scalar_int_mode mode;
174138fd1498Szrj 
174238fd1498Szrj   if (REG_P (x)
174338fd1498Szrj       && REGNO (x) >= FIRST_PSEUDO_REGISTER
174438fd1498Szrj       /* If this register is undefined at the start of the function, we
174538fd1498Szrj 	 can't say what its contents were.  */
174638fd1498Szrj       && ! REGNO_REG_SET_P
174738fd1498Szrj 	   (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), REGNO (x))
174838fd1498Szrj       && is_a <scalar_int_mode> (GET_MODE (x), &mode)
174938fd1498Szrj       && HWI_COMPUTABLE_MODE_P (mode))
175038fd1498Szrj     {
175138fd1498Szrj       reg_stat_type *rsp = &reg_stat[REGNO (x)];
175238fd1498Szrj 
175338fd1498Szrj       if (set == 0 || GET_CODE (set) == CLOBBER)
175438fd1498Szrj 	{
175538fd1498Szrj 	  rsp->nonzero_bits = GET_MODE_MASK (mode);
175638fd1498Szrj 	  rsp->sign_bit_copies = 1;
175738fd1498Szrj 	  return;
175838fd1498Szrj 	}
175938fd1498Szrj 
176038fd1498Szrj       /* If this register is being initialized using itself, and the
176138fd1498Szrj 	 register is uninitialized in this basic block, and there are
176238fd1498Szrj 	 no LOG_LINKS which set the register, then part of the
176338fd1498Szrj 	 register is uninitialized.  In that case we can't assume
176438fd1498Szrj 	 anything about the number of nonzero bits.
176538fd1498Szrj 
176638fd1498Szrj 	 ??? We could do better if we checked this in
176738fd1498Szrj 	 reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
176838fd1498Szrj 	 could avoid making assumptions about the insn which initially
176938fd1498Szrj 	 sets the register, while still using the information in other
177038fd1498Szrj 	 insns.  We would have to be careful to check every insn
177138fd1498Szrj 	 involved in the combination.  */
177238fd1498Szrj 
177338fd1498Szrj       if (insn
177438fd1498Szrj 	  && reg_referenced_p (x, PATTERN (insn))
177538fd1498Szrj 	  && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
177638fd1498Szrj 			       REGNO (x)))
177738fd1498Szrj 	{
177838fd1498Szrj 	  struct insn_link *link;
177938fd1498Szrj 
178038fd1498Szrj 	  FOR_EACH_LOG_LINK (link, insn)
178138fd1498Szrj 	    if (dead_or_set_p (link->insn, x))
178238fd1498Szrj 	      break;
178338fd1498Szrj 	  if (!link)
178438fd1498Szrj 	    {
178538fd1498Szrj 	      rsp->nonzero_bits = GET_MODE_MASK (mode);
178638fd1498Szrj 	      rsp->sign_bit_copies = 1;
178738fd1498Szrj 	      return;
178838fd1498Szrj 	    }
178938fd1498Szrj 	}
179038fd1498Szrj 
179138fd1498Szrj       /* If this is a complex assignment, see if we can convert it into a
179238fd1498Szrj 	 simple assignment.  */
179338fd1498Szrj       set = expand_field_assignment (set);
179438fd1498Szrj 
179538fd1498Szrj       /* If this is a simple assignment, or we have a paradoxical SUBREG,
179638fd1498Szrj 	 set what we know about X.  */
179738fd1498Szrj 
179838fd1498Szrj       if (SET_DEST (set) == x
179938fd1498Szrj 	  || (paradoxical_subreg_p (SET_DEST (set))
180038fd1498Szrj 	      && SUBREG_REG (SET_DEST (set)) == x))
180138fd1498Szrj 	update_rsp_from_reg_equal (rsp, insn, set, x);
180238fd1498Szrj       else
180338fd1498Szrj 	{
180438fd1498Szrj 	  rsp->nonzero_bits = GET_MODE_MASK (mode);
180538fd1498Szrj 	  rsp->sign_bit_copies = 1;
180638fd1498Szrj 	}
180738fd1498Szrj     }
180838fd1498Szrj }
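
/* For illustration (hypothetical pseudo, not from the original source):
   after

	 (set (reg:SI 117) (and:SI (reg:SI 118) (const_int 15)))

   this records reg_stat[117].nonzero_bits == 0xf; and since the upper 28
   bits of a 32-bit SImode value are then known to be zero, at least 28
   copies of the (zero) sign bit are known to exist.  */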
180938fd1498Szrj 
181038fd1498Szrj /* See if INSN can be combined into I3.  PRED, PRED2, SUCC and SUCC2 are
181138fd1498Szrj    optionally insns that were previously combined into I3 or that will be
181238fd1498Szrj    combined into the merger of INSN and I3.  The order is PRED, PRED2,
181338fd1498Szrj    INSN, SUCC, SUCC2, I3.
181438fd1498Szrj 
181538fd1498Szrj    Return 0 if the combination is not allowed for any reason.
181638fd1498Szrj 
181738fd1498Szrj    If the combination is allowed, *PDEST will be set to the single
181838fd1498Szrj    destination of INSN and *PSRC to the single source, and this function
181938fd1498Szrj    will return 1.  */
182038fd1498Szrj 
182138fd1498Szrj static int
182238fd1498Szrj can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED,
182338fd1498Szrj 	       rtx_insn *pred2 ATTRIBUTE_UNUSED, rtx_insn *succ, rtx_insn *succ2,
182438fd1498Szrj 	       rtx *pdest, rtx *psrc)
182538fd1498Szrj {
182638fd1498Szrj   int i;
182738fd1498Szrj   const_rtx set = 0;
182838fd1498Szrj   rtx src, dest;
182938fd1498Szrj   rtx_insn *p;
183038fd1498Szrj   rtx link;
183138fd1498Szrj   bool all_adjacent = true;
183238fd1498Szrj   int (*is_volatile_p) (const_rtx);
183338fd1498Szrj 
183438fd1498Szrj   if (succ)
183538fd1498Szrj     {
183638fd1498Szrj       if (succ2)
183738fd1498Szrj 	{
183838fd1498Szrj 	  if (next_active_insn (succ2) != i3)
183938fd1498Szrj 	    all_adjacent = false;
184038fd1498Szrj 	  if (next_active_insn (succ) != succ2)
184138fd1498Szrj 	    all_adjacent = false;
184238fd1498Szrj 	}
184338fd1498Szrj       else if (next_active_insn (succ) != i3)
184438fd1498Szrj 	all_adjacent = false;
184538fd1498Szrj       if (next_active_insn (insn) != succ)
184638fd1498Szrj 	all_adjacent = false;
184738fd1498Szrj     }
184838fd1498Szrj   else if (next_active_insn (insn) != i3)
184938fd1498Szrj     all_adjacent = false;
185038fd1498Szrj 
185138fd1498Szrj   /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
185238fd1498Szrj      or a PARALLEL consisting of such a SET and CLOBBERs.
185338fd1498Szrj 
185438fd1498Szrj      If INSN has CLOBBER parallel parts, ignore them for our processing.
185538fd1498Szrj      By definition, these happen during the execution of the insn.  When it
185638fd1498Szrj      is merged with another insn, all bets are off.  If they are, in fact,
185738fd1498Szrj      needed and aren't also supplied in I3, they may be added by
185838fd1498Szrj      recog_for_combine.  Otherwise, it won't match.
185938fd1498Szrj 
186038fd1498Szrj      We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
186138fd1498Szrj      note.
186238fd1498Szrj 
186338fd1498Szrj      Get the source and destination of INSN.  If more than one, can't
186438fd1498Szrj      combine.  */
186538fd1498Szrj 
186638fd1498Szrj   if (GET_CODE (PATTERN (insn)) == SET)
186738fd1498Szrj     set = PATTERN (insn);
186838fd1498Szrj   else if (GET_CODE (PATTERN (insn)) == PARALLEL
186938fd1498Szrj 	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
187038fd1498Szrj     {
187138fd1498Szrj       for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
187238fd1498Szrj 	{
187338fd1498Szrj 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
187438fd1498Szrj 
187538fd1498Szrj 	  switch (GET_CODE (elt))
187638fd1498Szrj 	    {
187738fd1498Szrj 	    /* This is important to combine floating point insns
187838fd1498Szrj 	       for the SH4 port.  */
187938fd1498Szrj 	    case USE:
188038fd1498Szrj 	      /* Combining an isolated USE doesn't make sense.
188138fd1498Szrj 		 We depend here on combinable_i3pat to reject them.  */
188238fd1498Szrj 	      /* The code below this loop only verifies that the inputs of
188338fd1498Szrj 		 the SET in INSN do not change.  We call reg_set_between_p
188438fd1498Szrj 		 to verify that the REG in the USE does not change between
188538fd1498Szrj 		 I3 and INSN.
188638fd1498Szrj 		 If the USE in INSN was for a pseudo register, the matching
188738fd1498Szrj 		 insn pattern will likely match any register; combining this
188838fd1498Szrj 		 with any other USE would only be safe if we knew that the
188938fd1498Szrj 		 used registers have identical values, or if there was
189038fd1498Szrj 		 something to tell them apart, e.g. different modes.  For
189138fd1498Szrj 		 now, we forgo such complicated tests and simply disallow
189238fd1498Szrj 		 combining of USES of pseudo registers with any other USE.  */
189338fd1498Szrj 	      if (REG_P (XEXP (elt, 0))
189438fd1498Szrj 		  && GET_CODE (PATTERN (i3)) == PARALLEL)
189538fd1498Szrj 		{
189638fd1498Szrj 		  rtx i3pat = PATTERN (i3);
189738fd1498Szrj 		  int i = XVECLEN (i3pat, 0) - 1;
189838fd1498Szrj 		  unsigned int regno = REGNO (XEXP (elt, 0));
189938fd1498Szrj 
190038fd1498Szrj 		  do
190138fd1498Szrj 		    {
190238fd1498Szrj 		      rtx i3elt = XVECEXP (i3pat, 0, i);
190338fd1498Szrj 
190438fd1498Szrj 		      if (GET_CODE (i3elt) == USE
190538fd1498Szrj 			  && REG_P (XEXP (i3elt, 0))
190638fd1498Szrj 			  && (REGNO (XEXP (i3elt, 0)) == regno
190738fd1498Szrj 			      ? reg_set_between_p (XEXP (elt, 0),
190838fd1498Szrj 						   PREV_INSN (insn), i3)
190938fd1498Szrj 			      : regno >= FIRST_PSEUDO_REGISTER))
191038fd1498Szrj 			return 0;
191138fd1498Szrj 		    }
191238fd1498Szrj 		  while (--i >= 0);
191338fd1498Szrj 		}
191438fd1498Szrj 	      break;
191538fd1498Szrj 
191638fd1498Szrj 	      /* We can ignore CLOBBERs.  */
191738fd1498Szrj 	    case CLOBBER:
191838fd1498Szrj 	      break;
191938fd1498Szrj 
192038fd1498Szrj 	    case SET:
192138fd1498Szrj 	      /* Ignore SETs whose result isn't used, but not those that
192238fd1498Szrj 		 have side effects.  */
192338fd1498Szrj 	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
192438fd1498Szrj 		  && insn_nothrow_p (insn)
192538fd1498Szrj 		  && !side_effects_p (elt))
192638fd1498Szrj 		break;
192738fd1498Szrj 
192838fd1498Szrj 	      /* If we have already found a SET, this is a second one and
192938fd1498Szrj 		 so we cannot combine with this insn.  */
193038fd1498Szrj 	      if (set)
193138fd1498Szrj 		return 0;
193238fd1498Szrj 
193338fd1498Szrj 	      set = elt;
193438fd1498Szrj 	      break;
193538fd1498Szrj 
193638fd1498Szrj 	    default:
193738fd1498Szrj 	      /* Anything else means we can't combine.  */
193838fd1498Szrj 	      return 0;
193938fd1498Szrj 	    }
194038fd1498Szrj 	}
194138fd1498Szrj 
194238fd1498Szrj       if (set == 0
194338fd1498Szrj 	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
194438fd1498Szrj 	     so don't do anything with it.  */
194538fd1498Szrj 	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
194638fd1498Szrj 	return 0;
194738fd1498Szrj     }
194838fd1498Szrj   else
194938fd1498Szrj     return 0;
195038fd1498Szrj 
195138fd1498Szrj   if (set == 0)
195238fd1498Szrj     return 0;
195338fd1498Szrj 
195438fd1498Szrj   /* The simplification in expand_field_assignment may call back to
195538fd1498Szrj      get_last_value, so set safe guard here.  */
195638fd1498Szrj   subst_low_luid = DF_INSN_LUID (insn);
195738fd1498Szrj 
195838fd1498Szrj   set = expand_field_assignment (set);
195938fd1498Szrj   src = SET_SRC (set), dest = SET_DEST (set);
196038fd1498Szrj 
196138fd1498Szrj   /* Do not eliminate a user-specified register if it is in an
196238fd1498Szrj      asm input, because doing so could break the register asm usage
196338fd1498Szrj      defined in the GCC manual.
196438fd1498Szrj      Be aware that this may cover more cases than we expect, but it
196538fd1498Szrj      should be harmless.  */
196638fd1498Szrj   if (REG_P (dest) && REG_USERVAR_P (dest) && HARD_REGISTER_P (dest)
196738fd1498Szrj       && extract_asm_operands (PATTERN (i3)))
196838fd1498Szrj     return 0;
196938fd1498Szrj 
197038fd1498Szrj   /* Don't eliminate a store in the stack pointer.  */
197138fd1498Szrj   if (dest == stack_pointer_rtx
197238fd1498Szrj       /* Don't combine with an insn that sets a register to itself if it has
197338fd1498Szrj 	 a REG_EQUAL note.  This may be part of a LIBCALL sequence.  */
197438fd1498Szrj       || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
197538fd1498Szrj       /* Can't merge an ASM_OPERANDS.  */
197638fd1498Szrj       || GET_CODE (src) == ASM_OPERANDS
197738fd1498Szrj       /* Can't merge a function call.  */
197838fd1498Szrj       || GET_CODE (src) == CALL
197938fd1498Szrj       /* Don't eliminate a function call argument.  */
198038fd1498Szrj       || (CALL_P (i3)
198138fd1498Szrj 	  && (find_reg_fusage (i3, USE, dest)
198238fd1498Szrj 	      || (REG_P (dest)
198338fd1498Szrj 		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
198438fd1498Szrj 		  && global_regs[REGNO (dest)])))
198538fd1498Szrj       /* Don't substitute into an incremented register.  */
198638fd1498Szrj       || FIND_REG_INC_NOTE (i3, dest)
198738fd1498Szrj       || (succ && FIND_REG_INC_NOTE (succ, dest))
198838fd1498Szrj       || (succ2 && FIND_REG_INC_NOTE (succ2, dest))
198938fd1498Szrj       /* Don't substitute into a non-local goto, this confuses CFG.  */
199038fd1498Szrj       || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
199138fd1498Szrj       /* Make sure that DEST is not used after INSN but before SUCC, or
199238fd1498Szrj 	 after SUCC and before SUCC2, or after SUCC2 but before I3.  */
199338fd1498Szrj       || (!all_adjacent
199438fd1498Szrj 	  && ((succ2
199538fd1498Szrj 	       && (reg_used_between_p (dest, succ2, i3)
199638fd1498Szrj 		   || reg_used_between_p (dest, succ, succ2)))
199738fd1498Szrj 	      || (!succ2 && succ && reg_used_between_p (dest, succ, i3))
199838fd1498Szrj 	      || (!succ2 && !succ && reg_used_between_p (dest, insn, i3))
199938fd1498Szrj 	      || (succ
200038fd1498Szrj 		  /* SUCC and SUCC2 can be split halves from a PARALLEL; in
200138fd1498Szrj 		     that case SUCC is not in the insn stream, so use SUCC2
200238fd1498Szrj 		     instead for this test.  */
200338fd1498Szrj 		  && reg_used_between_p (dest, insn,
200438fd1498Szrj 					 succ2
200538fd1498Szrj 					 && INSN_UID (succ) == INSN_UID (succ2)
200638fd1498Szrj 					 ? succ2 : succ))))
200738fd1498Szrj       /* Make sure that the value that is to be substituted for the register
200838fd1498Szrj 	 does not use any registers whose values alter in between.  However,
200938fd1498Szrj 	 if the insns are adjacent, a use can't cross a set even though we
201038fd1498Szrj 	 think it might (this can happen for a sequence of insns each setting
201138fd1498Szrj 	 the same destination; last_set of that register might point to
201238fd1498Szrj 	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
201338fd1498Szrj 	 equivalent to the memory so the substitution is valid even if there
201438fd1498Szrj 	 are intervening stores.  Also, don't move a volatile asm or
201538fd1498Szrj 	 UNSPEC_VOLATILE across any other insns.  */
201638fd1498Szrj       || (! all_adjacent
201738fd1498Szrj 	  && (((!MEM_P (src)
201838fd1498Szrj 		|| ! find_reg_note (insn, REG_EQUIV, src))
201938fd1498Szrj 	       && modified_between_p (src, insn, i3))
202038fd1498Szrj 	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
202138fd1498Szrj 	      || GET_CODE (src) == UNSPEC_VOLATILE))
202238fd1498Szrj       /* Don't combine across a CALL_INSN, because that would possibly
202338fd1498Szrj 	 change whether the life span of some REGs crosses calls or not,
202438fd1498Szrj 	 and it is a pain to update that information.
202538fd1498Szrj 	 Exception: if source is a constant, moving it later can't hurt.
202638fd1498Szrj 	 Accept that as a special case.  */
202738fd1498Szrj       || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
202838fd1498Szrj     return 0;
202938fd1498Szrj 
203038fd1498Szrj   /* DEST must either be a REG or CC0.  */
203138fd1498Szrj   if (REG_P (dest))
203238fd1498Szrj     {
203338fd1498Szrj       /* If register alignment is being enforced for multi-word items in all
203438fd1498Szrj 	 cases except for parameters, it is possible to have a register copy
203538fd1498Szrj 	 insn referencing a hard register that is not allowed to contain the
203638fd1498Szrj 	 mode being copied and which would not be valid as an operand of most
203738fd1498Szrj 	 insns.  Eliminate this problem by not combining with such an insn.
203838fd1498Szrj 
203938fd1498Szrj 	 Also, on some machines we don't want to extend the life of a hard
204038fd1498Szrj 	 register.  */
204138fd1498Szrj 
204238fd1498Szrj       if (REG_P (src)
204338fd1498Szrj 	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
204438fd1498Szrj 	       && !targetm.hard_regno_mode_ok (REGNO (dest), GET_MODE (dest)))
204538fd1498Szrj 	      /* Don't extend the life of a hard register unless it is
204638fd1498Szrj 		 a user variable (if we have few registers) or it can't
204738fd1498Szrj 		 fit into the desired register (meaning something special
204838fd1498Szrj 		 is going on).
204938fd1498Szrj 		 Also avoid substituting a return register into I3, because
205038fd1498Szrj 		 reload can't handle a conflict with constraints of other
205138fd1498Szrj 		 inputs.  */
205238fd1498Szrj 	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
205338fd1498Szrj 		  && !targetm.hard_regno_mode_ok (REGNO (src),
205438fd1498Szrj 						  GET_MODE (src)))))
205538fd1498Szrj 	return 0;
205638fd1498Szrj     }
205738fd1498Szrj   else if (GET_CODE (dest) != CC0)
205838fd1498Szrj     return 0;
205938fd1498Szrj 
206138fd1498Szrj   if (GET_CODE (PATTERN (i3)) == PARALLEL)
206238fd1498Szrj     for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
206338fd1498Szrj       if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
206438fd1498Szrj 	{
206538fd1498Szrj 	  rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
206638fd1498Szrj 
206738fd1498Szrj 	  /* If the clobber represents an earlyclobber operand, we must not
206838fd1498Szrj 	     substitute an expression containing the clobbered register.
206938fd1498Szrj 	     As we do not analyze the constraint strings here, we have to
207038fd1498Szrj 	     make the conservative assumption.  However, if the register is
207138fd1498Szrj 	     a fixed hard reg, the clobber cannot represent any operand;
207238fd1498Szrj 	     we leave it up to the machine description to either accept or
207338fd1498Szrj 	     reject use-and-clobber patterns.  */
207438fd1498Szrj 	  if (!REG_P (reg)
207538fd1498Szrj 	      || REGNO (reg) >= FIRST_PSEUDO_REGISTER
207638fd1498Szrj 	      || !fixed_regs[REGNO (reg)])
207738fd1498Szrj 	    if (reg_overlap_mentioned_p (reg, src))
207838fd1498Szrj 	      return 0;
207938fd1498Szrj 	}
208038fd1498Szrj 
208138fd1498Szrj   /* If INSN contains anything volatile, or is an `asm' (whether volatile
208238fd1498Szrj      or not), reject, unless nothing volatile comes between it and I3.  */
208338fd1498Szrj 
208438fd1498Szrj   if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
208538fd1498Szrj     {
208638fd1498Szrj       /* Make sure neither succ nor succ2 contains a volatile reference.  */
208738fd1498Szrj       if (succ2 != 0 && volatile_refs_p (PATTERN (succ2)))
208838fd1498Szrj 	return 0;
208938fd1498Szrj       if (succ != 0 && volatile_refs_p (PATTERN (succ)))
209038fd1498Szrj 	return 0;
209138fd1498Szrj       /* We'll check insns between INSN and I3 below.  */
209238fd1498Szrj     }
209338fd1498Szrj 
209438fd1498Szrj   /* If INSN is an asm, and DEST is a hard register, reject, since it has
209538fd1498Szrj      to be an explicit register variable, and was chosen for a reason.  */
209638fd1498Szrj 
209738fd1498Szrj   if (GET_CODE (src) == ASM_OPERANDS
209838fd1498Szrj       && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
209938fd1498Szrj     return 0;
210038fd1498Szrj 
210138fd1498Szrj   /* If INSN contains volatile references (specifically volatile MEMs),
210238fd1498Szrj      we cannot combine across any other volatile references.
210338fd1498Szrj      Even if INSN doesn't contain volatile references, any intervening
210438fd1498Szrj      volatile insn might affect machine state.  */
210538fd1498Szrj 
210638fd1498Szrj   is_volatile_p = volatile_refs_p (PATTERN (insn))
210738fd1498Szrj     ? volatile_refs_p
210838fd1498Szrj     : volatile_insn_p;
210938fd1498Szrj 
211038fd1498Szrj   for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
211138fd1498Szrj     if (INSN_P (p) && p != succ && p != succ2 && is_volatile_p (PATTERN (p)))
211238fd1498Szrj       return 0;
211338fd1498Szrj 
211438fd1498Szrj   /* If INSN contains an autoincrement or autodecrement, make sure that
211538fd1498Szrj      register is not used between there and I3, and not already used in
211638fd1498Szrj      I3 either.  Neither must it be used in PRED or SUCC, if they exist.
211738fd1498Szrj      Also insist that I3 not be a jump; if it were one
211838fd1498Szrj      and the incremented register were spilled, we would lose.  */
211938fd1498Szrj 
212038fd1498Szrj   if (AUTO_INC_DEC)
212138fd1498Szrj     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
212238fd1498Szrj       if (REG_NOTE_KIND (link) == REG_INC
212338fd1498Szrj 	  && (JUMP_P (i3)
212438fd1498Szrj 	      || reg_used_between_p (XEXP (link, 0), insn, i3)
212538fd1498Szrj 	      || (pred != NULL_RTX
212638fd1498Szrj 		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
212738fd1498Szrj 	      || (pred2 != NULL_RTX
212838fd1498Szrj 		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred2)))
212938fd1498Szrj 	      || (succ != NULL_RTX
213038fd1498Szrj 		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
213138fd1498Szrj 	      || (succ2 != NULL_RTX
213238fd1498Szrj 		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ2)))
213338fd1498Szrj 	      || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
213438fd1498Szrj 	return 0;
213538fd1498Szrj 
213638fd1498Szrj   /* Don't combine an insn that follows a CC0-setting insn.
213738fd1498Szrj      An insn that uses CC0 must not be separated from the one that sets it.
213838fd1498Szrj      We do, however, allow I2 to follow a CC0-setting insn if that insn
213938fd1498Szrj      is passed as I1; in that case it will be deleted also.
214038fd1498Szrj      We also allow combining in this case if all the insns are adjacent
214138fd1498Szrj      because that would leave the two CC0 insns adjacent as well.
214238fd1498Szrj      It would be more logical to test whether CC0 occurs inside I1 or I2,
214338fd1498Szrj      but that would be much slower, and this ought to be equivalent.  */
214438fd1498Szrj 
214538fd1498Szrj   if (HAVE_cc0)
214638fd1498Szrj     {
214738fd1498Szrj       p = prev_nonnote_insn (insn);
214838fd1498Szrj       if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
214938fd1498Szrj 	  && ! all_adjacent)
215038fd1498Szrj 	return 0;
215138fd1498Szrj     }
215238fd1498Szrj 
215338fd1498Szrj   /* If we get here, we have passed all the tests and the combination is
215438fd1498Szrj      to be allowed.  */
215538fd1498Szrj 
215638fd1498Szrj   *pdest = dest;
215738fd1498Szrj   *psrc = src;
215838fd1498Szrj 
215938fd1498Szrj   return 1;
216038fd1498Szrj }
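
/* A minimal sketch of a combination this function allows (hypothetical
   pseudos and adjacent insns, for illustration only):

	 INSN: (set (reg:SI 116) (ashift:SI (reg:SI 115) (const_int 2)))
	 I3:   (set (reg:SI 117) (plus:SI (reg:SI 116) (reg:SI 114)))

   With no intervening uses, sets, calls or volatile references, it
   returns 1 with *PDEST == (reg:SI 116) and *PSRC == the ASHIFT, and
   try_combine may then substitute the shift into I3.  */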
216138fd1498Szrj 
216238fd1498Szrj /* LOC is the location within I3 that contains its pattern or the component
216338fd1498Szrj    of a PARALLEL of the pattern.  We validate that it is valid for combining.
216438fd1498Szrj 
216538fd1498Szrj    One problem is if I3 modifies its output, as opposed to replacing it
216638fd1498Szrj    entirely, we can't allow the output to contain I2DEST, I1DEST or I0DEST as
216738fd1498Szrj    doing so would produce an insn that is not equivalent to the original insns.
216838fd1498Szrj 
216938fd1498Szrj    Consider:
217038fd1498Szrj 
217138fd1498Szrj 	 (set (reg:DI 101) (reg:DI 100))
217238fd1498Szrj 	 (set (subreg:SI (reg:DI 101) 0) <foo>)
217338fd1498Szrj 
217438fd1498Szrj    This is NOT equivalent to:
217538fd1498Szrj 
217638fd1498Szrj 	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
217738fd1498Szrj 		    (set (reg:DI 101) (reg:DI 100))])
217838fd1498Szrj 
217938fd1498Szrj    Not only does this modify 100 (in which case it might still be valid
218038fd1498Szrj    if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
218138fd1498Szrj 
218238fd1498Szrj    We can also run into a problem if I2 sets a register that I1
218338fd1498Szrj    uses and I1 gets directly substituted into I3 (not via I2).  In that
218438fd1498Szrj    case, we would be getting the wrong value of I2DEST into I3, so we
218538fd1498Szrj    must reject the combination.  This case occurs when I2 and I1 both
218638fd1498Szrj    feed into I3, rather than when I1 feeds into I2, which feeds into I3.
218738fd1498Szrj    If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
218838fd1498Szrj    of a SET must prevent combination from occurring.  The same situation
218938fd1498Szrj    can occur for I0, in which case I0_NOT_IN_SRC is set.
219038fd1498Szrj 
219138fd1498Szrj    Before doing the above check, we first try to expand a field assignment
219238fd1498Szrj    into a set of logical operations.
219338fd1498Szrj 
219438fd1498Szrj    If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
219538fd1498Szrj    we place a register that is both set and used within I3.  If more than one
219638fd1498Szrj    such register is detected, we fail.
219738fd1498Szrj 
219838fd1498Szrj    Return 1 if the combination is valid, zero otherwise.  */
219938fd1498Szrj 
220038fd1498Szrj static int
220138fd1498Szrj combinable_i3pat (rtx_insn *i3, rtx *loc, rtx i2dest, rtx i1dest, rtx i0dest,
220238fd1498Szrj 		  int i1_not_in_src, int i0_not_in_src, rtx *pi3dest_killed)
220338fd1498Szrj {
220438fd1498Szrj   rtx x = *loc;
220538fd1498Szrj 
220638fd1498Szrj   if (GET_CODE (x) == SET)
220738fd1498Szrj     {
220838fd1498Szrj       rtx set = x;
220938fd1498Szrj       rtx dest = SET_DEST (set);
221038fd1498Szrj       rtx src = SET_SRC (set);
221138fd1498Szrj       rtx inner_dest = dest;
221238fd1498Szrj       rtx subdest;
221338fd1498Szrj 
221438fd1498Szrj       while (GET_CODE (inner_dest) == STRICT_LOW_PART
221538fd1498Szrj 	     || GET_CODE (inner_dest) == SUBREG
221638fd1498Szrj 	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
221738fd1498Szrj 	inner_dest = XEXP (inner_dest, 0);
221838fd1498Szrj 
221938fd1498Szrj       /* Check for the case where I3 modifies its output, as discussed
222038fd1498Szrj 	 above.  We don't want to prevent pseudos from being combined
222138fd1498Szrj 	 into the address of a MEM, so only prevent the combination if
222238fd1498Szrj 	 i1 or i2 set the same MEM.  */
222338fd1498Szrj       if ((inner_dest != dest &&
222438fd1498Szrj 	   (!MEM_P (inner_dest)
222538fd1498Szrj 	    || rtx_equal_p (i2dest, inner_dest)
222638fd1498Szrj 	    || (i1dest && rtx_equal_p (i1dest, inner_dest))
222738fd1498Szrj 	    || (i0dest && rtx_equal_p (i0dest, inner_dest)))
222838fd1498Szrj 	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
222938fd1498Szrj 	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))
223038fd1498Szrj 	       || (i0dest && reg_overlap_mentioned_p (i0dest, inner_dest))))
223138fd1498Szrj 
223238fd1498Szrj 	  /* This is the same test done in can_combine_p except we can't test
223338fd1498Szrj 	     all_adjacent; we don't have to, since this instruction will stay
223438fd1498Szrj 	     in place, thus we are not considering increasing the lifetime of
223538fd1498Szrj 	     INNER_DEST.
223638fd1498Szrj 
223738fd1498Szrj 	     Also, if this insn sets a function argument, combining it with
223838fd1498Szrj 	     something that might need a spill could clobber a previous
223938fd1498Szrj 	     function argument; the all_adjacent test in can_combine_p also
224038fd1498Szrj 	     checks this; here, we do a more specific test for this case.  */
224138fd1498Szrj 
224238fd1498Szrj 	  || (REG_P (inner_dest)
224338fd1498Szrj 	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
224438fd1498Szrj 	      && !targetm.hard_regno_mode_ok (REGNO (inner_dest),
224538fd1498Szrj 					      GET_MODE (inner_dest)))
224638fd1498Szrj 	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))
224738fd1498Szrj 	  || (i0_not_in_src && reg_overlap_mentioned_p (i0dest, src)))
224838fd1498Szrj 	return 0;
224938fd1498Szrj 
225038fd1498Szrj       /* If DEST is used in I3, it is being killed in this insn, so
225138fd1498Szrj 	 record that for later.  We have to consider paradoxical
225238fd1498Szrj 	 subregs here, since they kill the whole register, but we
225338fd1498Szrj 	 ignore partial subregs, STRICT_LOW_PART, etc.
225438fd1498Szrj 	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
225538fd1498Szrj 	 STACK_POINTER_REGNUM, since these are always considered to be
225638fd1498Szrj 	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
225738fd1498Szrj       subdest = dest;
225838fd1498Szrj       if (GET_CODE (subdest) == SUBREG && !partial_subreg_p (subdest))
225938fd1498Szrj 	subdest = SUBREG_REG (subdest);
226038fd1498Szrj       if (pi3dest_killed
226138fd1498Szrj 	  && REG_P (subdest)
226238fd1498Szrj 	  && reg_referenced_p (subdest, PATTERN (i3))
226338fd1498Szrj 	  && REGNO (subdest) != FRAME_POINTER_REGNUM
226438fd1498Szrj 	  && (HARD_FRAME_POINTER_IS_FRAME_POINTER
226538fd1498Szrj 	      || REGNO (subdest) != HARD_FRAME_POINTER_REGNUM)
226638fd1498Szrj 	  && (FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM
226738fd1498Szrj 	      || (REGNO (subdest) != ARG_POINTER_REGNUM
226838fd1498Szrj 		  || ! fixed_regs [REGNO (subdest)]))
226938fd1498Szrj 	  && REGNO (subdest) != STACK_POINTER_REGNUM)
227038fd1498Szrj 	{
227138fd1498Szrj 	  if (*pi3dest_killed)
227238fd1498Szrj 	    return 0;
227338fd1498Szrj 
227438fd1498Szrj 	  *pi3dest_killed = subdest;
227538fd1498Szrj 	}
227638fd1498Szrj     }
227738fd1498Szrj 
227838fd1498Szrj   else if (GET_CODE (x) == PARALLEL)
227938fd1498Szrj     {
228038fd1498Szrj       int i;
228138fd1498Szrj 
228238fd1498Szrj       for (i = 0; i < XVECLEN (x, 0); i++)
228338fd1498Szrj 	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest, i0dest,
228438fd1498Szrj 				i1_not_in_src, i0_not_in_src, pi3dest_killed))
228538fd1498Szrj 	  return 0;
228638fd1498Szrj     }
228738fd1498Szrj 
228838fd1498Szrj   return 1;
228938fd1498Szrj }
229038fd1498Szrj 
229138fd1498Szrj /* Return 1 if X is an arithmetic expression that contains a multiplication
229238fd1498Szrj    and division.  We don't count multiplications by powers of two here.  */
229338fd1498Szrj 
229438fd1498Szrj static int
contains_muldiv(rtx x)229538fd1498Szrj contains_muldiv (rtx x)
229638fd1498Szrj {
229738fd1498Szrj   switch (GET_CODE (x))
229838fd1498Szrj     {
229938fd1498Szrj     case MOD:  case DIV:  case UMOD:  case UDIV:
230038fd1498Szrj       return 1;
230138fd1498Szrj 
230238fd1498Szrj     case MULT:
230338fd1498Szrj       return ! (CONST_INT_P (XEXP (x, 1))
230438fd1498Szrj 		&& pow2p_hwi (UINTVAL (XEXP (x, 1))));
230538fd1498Szrj     default:
230638fd1498Szrj       if (BINARY_P (x))
230738fd1498Szrj 	return contains_muldiv (XEXP (x, 0))
230838fd1498Szrj 	    || contains_muldiv (XEXP (x, 1));
230938fd1498Szrj 
231038fd1498Szrj       if (UNARY_P (x))
231138fd1498Szrj 	return contains_muldiv (XEXP (x, 0));
231238fd1498Szrj 
231338fd1498Szrj       return 0;
231438fd1498Szrj     }
231538fd1498Szrj }
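
/* For illustration (a hypothetical example, not from the original
   sources): contains_muldiv returns 1 for
	(plus:SI (mult:SI (reg:SI 100) (reg:SI 101)) (reg:SI 102))
   since the MULT has no constant power-of-two operand, but returns 0 for
	(mult:SI (reg:SI 100) (const_int 8))
   because a multiplication by 8 is really a cheap shift.  */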

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx_insn *insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (!NONDEBUG_INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, reload might die.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((HARD_REGISTER_P (src)
	   && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (src))
	   && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (src))))
	  || (HARD_REGISTER_P (dest)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (dest))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}
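
/* For illustration (hypothetical, target-dependent): on a target where
   hard register 2 belongs to a single-register class for which
   class_likely_spilled_p holds, an insn such as
	(set (reg:SI 100) (reg:SI 2))
   is rejected here; substituting (reg:SI 2) into later insns could
   leave reload with nowhere to spill.  A move involving a fixed register
   like the stack pointer is still allowed, per the exception above.  */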

struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};

/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
   hard registers that are known to be written to / clobbered in full.  */
static void
likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
{
  struct likely_spilled_retval_info *const info =
    (struct likely_spilled_retval_info *) data;
  unsigned regno, nregs;
  unsigned new_mask;

  if (!REG_P (XEXP (set, 0)))
    return;
  regno = REGNO (x);
  if (regno >= info->regno + info->nregs)
    return;
  nregs = REG_NREGS (x);
  if (regno + nregs <= info->regno)
    return;
  new_mask = (2U << (nregs - 1)) - 1;
  if (regno < info->regno)
    new_mask >>= info->regno - regno;
  else
    new_mask <<= regno - info->regno;
  info->mask &= ~new_mask;
}
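
/* Worked example for the mask arithmetic above (illustrative values
   only): with info->regno == 10 and info->nregs == 2, bit 0 of
   info->mask stands for reg 10 and bit 1 for reg 11.  A full write to
   a two-register value at reg 9 gives nregs == 2, so
   new_mask == (2U << 1) - 1 == 0b11, covering regs 9..10; since
   regno (9) < info->regno (10), new_mask >>= 1 leaves 0b01, and only
   the bit for reg 10 is cleared from info->mask.  */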

/* Return nonzero iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider combining into the
   second copy insn for a complex value.  */

static int
likely_spilled_retval_p (rtx_insn *insn)
{
  rtx_insn *use = BB_END (this_basic_block);
  rtx reg;
  rtx_insn *p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which TARGET_FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return 0;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !targetm.calls.function_value_regno_p (REGNO (reg)))
    return 0;
  regno = REGNO (reg);
  nregs = REG_NREGS (reg);
  if (nregs == 1)
    return 0;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    if (INSN_P (p))
      note_stores (PATTERN (p), likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs--;
  do
    {
      if ((mask & 1 << nregs)
	  && targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno + nregs)))
	return 1;
    } while (nregs--);
  return 0;
}
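
/* For illustration (a hypothetical scenario): a _Complex float return
   value in hard regs 8..9 might be copied out by two insns ending the
   block:
	(set (reg:SF 8) (reg:SF 100))
	(set (reg:SF 9) (reg:SF 101))
	(use (reg:SC 8))
   When combining into the second copy, reg 8 already holds part of the
   return value and nothing after that insn rewrites it in full, so its
   mask bit survives the note_stores walk; if reg 8's class is likely
   spilled, the combination is refused.  */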
244238fd1498Szrj 
244338fd1498Szrj /* Adjust INSN after we made a change to its destination.
244438fd1498Szrj 
244538fd1498Szrj    Changing the destination can invalidate notes that say something about
244638fd1498Szrj    the results of the insn and a LOG_LINK pointing to the insn.  */
244738fd1498Szrj 
244838fd1498Szrj static void
adjust_for_new_dest(rtx_insn * insn)244938fd1498Szrj adjust_for_new_dest (rtx_insn *insn)
245038fd1498Szrj {
245138fd1498Szrj   /* For notes, be conservative and simply remove them.  */
245238fd1498Szrj   remove_reg_equal_equiv_notes (insn);
245338fd1498Szrj 
245438fd1498Szrj   /* The new insn will have a destination that was previously the destination
245538fd1498Szrj      of an insn just above it.  Call distribute_links to make a LOG_LINK from
245638fd1498Szrj      the next use of that destination.  */
245738fd1498Szrj 
245838fd1498Szrj   rtx set = single_set (insn);
245938fd1498Szrj   gcc_assert (set);
246038fd1498Szrj 
246138fd1498Szrj   rtx reg = SET_DEST (set);
246238fd1498Szrj 
246338fd1498Szrj   while (GET_CODE (reg) == ZERO_EXTRACT
246438fd1498Szrj 	 || GET_CODE (reg) == STRICT_LOW_PART
246538fd1498Szrj 	 || GET_CODE (reg) == SUBREG)
246638fd1498Szrj     reg = XEXP (reg, 0);
246738fd1498Szrj   gcc_assert (REG_P (reg));
246838fd1498Szrj 
246938fd1498Szrj   distribute_links (alloc_insn_link (insn, REGNO (reg), NULL));
247038fd1498Szrj 
247138fd1498Szrj   df_insn_rescan (insn);
247238fd1498Szrj }
247338fd1498Szrj 
247438fd1498Szrj /* Return TRUE if combine can reuse reg X in mode MODE.
247538fd1498Szrj    ADDED_SETS is nonzero if the original set is still required.  */
247638fd1498Szrj static bool
can_change_dest_mode(rtx x,int added_sets,machine_mode mode)247738fd1498Szrj can_change_dest_mode (rtx x, int added_sets, machine_mode mode)
247838fd1498Szrj {
247938fd1498Szrj   unsigned int regno;
248038fd1498Szrj 
248138fd1498Szrj   if (!REG_P (x))
248238fd1498Szrj     return false;
248338fd1498Szrj 
248438fd1498Szrj   /* Don't change between modes with different underlying register sizes,
248538fd1498Szrj      since this could lead to invalid subregs.  */
248638fd1498Szrj   if (maybe_ne (REGMODE_NATURAL_SIZE (mode),
248738fd1498Szrj 		REGMODE_NATURAL_SIZE (GET_MODE (x))))
248838fd1498Szrj     return false;
248938fd1498Szrj 
249038fd1498Szrj   regno = REGNO (x);
249138fd1498Szrj   /* Allow hard registers if the new mode is legal, and occupies no more
249238fd1498Szrj      registers than the old mode.  */
249338fd1498Szrj   if (regno < FIRST_PSEUDO_REGISTER)
249438fd1498Szrj     return (targetm.hard_regno_mode_ok (regno, mode)
249538fd1498Szrj 	    && REG_NREGS (x) >= hard_regno_nregs (regno, mode));
249638fd1498Szrj 
249738fd1498Szrj   /* Or a pseudo that is only used once.  */
249838fd1498Szrj   return (regno < reg_n_sets_max
249938fd1498Szrj 	  && REG_N_SETS (regno) == 1
250038fd1498Szrj 	  && !added_sets
250138fd1498Szrj 	  && !REG_USERVAR_P (x));
250238fd1498Szrj }
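
/* For illustration (hypothetical numbers): giving pseudo (reg:CC 100) a
   different CC mode chosen by SELECT_CC_MODE is allowed only if reg 100
   is set exactly once, no copy of its original set is being kept
   (ADDED_SETS is zero), it is not a user variable, and the two modes
   agree on REGMODE_NATURAL_SIZE; a hard register must additionally be
   ok in the new mode and fit in no more registers than before.  */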


/* Check whether X, the destination of a set, refers to part of
   the register specified by REG.  */

static bool
reg_subword_p (rtx x, rtx reg)
{
  /* Check that reg is an integer mode register.  */
  if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_CODE (x) == ZERO_EXTRACT)
    x = XEXP (x, 0);

  return GET_CODE (x) == SUBREG
	 && SUBREG_REG (x) == reg
	 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
}
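
/* For illustration: with REG = (reg:SI 100), reg_subword_p holds for
   X = (subreg:HI (reg:SI 100) 0) and for
   X = (strict_low_part (subreg:HI (reg:SI 100) 0)), but not for
   X = (reg:SI 100) itself (no SUBREG) nor for any access in a
   non-integer mode.  */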
252338fd1498Szrj 
252438fd1498Szrj /* Delete the unconditional jump INSN and adjust the CFG correspondingly.
252538fd1498Szrj    Note that the INSN should be deleted *after* removing dead edges, so
252638fd1498Szrj    that the kept edge is the fallthrough edge for a (set (pc) (pc))
252738fd1498Szrj    but not for a (set (pc) (label_ref FOO)).  */
252838fd1498Szrj 
252938fd1498Szrj static void
update_cfg_for_uncondjump(rtx_insn * insn)253038fd1498Szrj update_cfg_for_uncondjump (rtx_insn *insn)
253138fd1498Szrj {
253238fd1498Szrj   basic_block bb = BLOCK_FOR_INSN (insn);
253338fd1498Szrj   gcc_assert (BB_END (bb) == insn);
253438fd1498Szrj 
253538fd1498Szrj   purge_dead_edges (bb);
253638fd1498Szrj 
253738fd1498Szrj   delete_insn (insn);
253838fd1498Szrj   if (EDGE_COUNT (bb->succs) == 1)
253938fd1498Szrj     {
254038fd1498Szrj       rtx_insn *insn;
254138fd1498Szrj 
254238fd1498Szrj       single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
254338fd1498Szrj 
254438fd1498Szrj       /* Remove barriers from the footer if there are any.  */
254538fd1498Szrj       for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
254638fd1498Szrj 	if (BARRIER_P (insn))
254738fd1498Szrj 	  {
254838fd1498Szrj 	    if (PREV_INSN (insn))
254938fd1498Szrj 	      SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
255038fd1498Szrj 	    else
255138fd1498Szrj 	      BB_FOOTER (bb) = NEXT_INSN (insn);
255238fd1498Szrj 	    if (NEXT_INSN (insn))
255338fd1498Szrj 	      SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
255438fd1498Szrj 	  }
255538fd1498Szrj 	else if (LABEL_P (insn))
255638fd1498Szrj 	  break;
255738fd1498Szrj     }
255838fd1498Szrj }
255938fd1498Szrj 
256038fd1498Szrj /* Return whether PAT is a PARALLEL of exactly N register SETs followed
256138fd1498Szrj    by an arbitrary number of CLOBBERs.  */
256238fd1498Szrj static bool
is_parallel_of_n_reg_sets(rtx pat,int n)256338fd1498Szrj is_parallel_of_n_reg_sets (rtx pat, int n)
256438fd1498Szrj {
256538fd1498Szrj   if (GET_CODE (pat) != PARALLEL)
256638fd1498Szrj     return false;
256738fd1498Szrj 
256838fd1498Szrj   int len = XVECLEN (pat, 0);
256938fd1498Szrj   if (len < n)
257038fd1498Szrj     return false;
257138fd1498Szrj 
257238fd1498Szrj   int i;
257338fd1498Szrj   for (i = 0; i < n; i++)
257438fd1498Szrj     if (GET_CODE (XVECEXP (pat, 0, i)) != SET
257538fd1498Szrj 	|| !REG_P (SET_DEST (XVECEXP (pat, 0, i))))
257638fd1498Szrj       return false;
257738fd1498Szrj   for ( ; i < len; i++)
257838fd1498Szrj     if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER
257938fd1498Szrj 	|| XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
258038fd1498Szrj       return false;
258138fd1498Szrj 
258238fd1498Szrj   return true;
258338fd1498Szrj }
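
/* For illustration, a pattern this accepts for N == 2 (register numbers
   hypothetical):
	(parallel [(set (reg:SI 100) (reg:SI 102))
		   (set (reg:SI 101) (reg:SI 103))
		   (clobber (reg:CC 17))])
   whereas a trailing (clobber (const_int 0)) -- the placeholder clobber
   used for insns that failed to match -- makes it return false.  */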

/* Return whether INSN, a PARALLEL of N register SETs (and maybe some
   CLOBBERs), can be split into individual SETs in that order, without
   changing semantics.  */
static bool
can_split_parallel_of_n_reg_sets (rtx_insn *insn, int n)
{
  if (!insn_nothrow_p (insn))
    return false;

  rtx pat = PATTERN (insn);

  int i, j;
  for (i = 0; i < n; i++)
    {
      if (side_effects_p (SET_SRC (XVECEXP (pat, 0, i))))
	return false;

      rtx reg = SET_DEST (XVECEXP (pat, 0, i));

      for (j = i + 1; j < n; j++)
	if (reg_referenced_p (reg, XVECEXP (pat, 0, j)))
	  return false;
    }

  return true;
}
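
/* For illustration: the PARALLEL
	(parallel [(set (reg:SI 100) (reg:SI 102))
		   (set (reg:SI 101) (plus:SI (reg:SI 100) (const_int 4)))])
   cannot be split in order: inside one PARALLEL all SETs read their
   inputs simultaneously, so the PLUS sees the old reg 100, but after
   splitting it would see the value the first SET just stored.  With
   (reg:SI 103) in place of (reg:SI 100) in the second SET, splitting
   is fine (assuming no side effects and a nothrow insn).  */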

/* Try to combine the insns I0, I1 and I2 into I3.
   Here I0, I1 and I2 appear earlier than I3.
   I0 and I1 can be zero; then we combine just I2 into I3, or I1 and I2 into
   I3.

   If we are combining more than two insns and the resulting insn is not
   recognized, try splitting it into two insns.  If that happens, I2 and I3
   are retained and I1/I0 are pseudo-deleted by turning them into a NOTE.
   Otherwise, I0, I1 and I2 are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.

   LAST_COMBINED_INSN is either I3, or some insn after I3 that was
   passed as I3 to an earlier try_combine within the same basic
   block.  */

static rtx_insn *
try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
	     int *new_direct_jump_p, rtx_insn *last_combined_insn)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  rtvec newpat_vec_with_clobbers = 0;
  int substed_i2 = 0, substed_i1 = 0, substed_i0 = 0;
  /* Indicates need to preserve SET in I0, I1 or I2 in I3 if it is not
     dead.  */
  int added_sets_0, added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's or I1's body now appears in I3.  */
  int i2_is_used = 0, i1_is_used = 0;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2, I1 and I0.  */
  rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0, i0dest = 0, i0src = 0;
  /* Copy of SET_SRC of I1 and I0, if needed.  */
  rtx i1src_copy = 0, i0src_copy = 0, i0src_copy2 = 0;
  /* Set if I2DEST was reused as a scratch register.  */
  bool i2scratch = false;
  /* The PATTERNs of I0, I1, and I2, or a copy of them in certain cases.  */
  rtx i0pat = 0, i1pat = 0, i2pat = 0;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i0dest_in_i0src = 0, i1dest_in_i0src = 0, i2dest_in_i0src = 0;
  int i2dest_killed = 0, i1dest_killed = 0, i0dest_killed = 0;
  int i1_feeds_i2_n = 0, i0_feeds_i2_n = 0, i0_feeds_i1_n = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  int swap_i2i3 = 0;
  int split_i2i3 = 0;
  int changed_i3_dest = 0;

  int maxreg;
  rtx_insn *temp_insn;
  rtx temp_expr;
  struct insn_link *link;
  rtx other_pat = 0;
  rtx new_other_notes;
  int i;
  scalar_int_mode dest_mode, temp_mode;

  /* Immediately return if any of I0,I1,I2 are the same insn (I3 can
     never be).  */
  if (i1 == i2 || i0 == i2 || (i0 && i0 == i1))
    return 0;

  /* Only try four-insn combinations when there's high likelihood of
     success.  Look for simple insns, such as loads of constants or
     binary operations involving a constant.  */
  if (i0)
    {
      int i;
      int ngood = 0;
      int nshift = 0;
      rtx set0, set3;

      if (!flag_expensive_optimizations)
	return 0;

      for (i = 0; i < 4; i++)
	{
	  rtx_insn *insn = i == 0 ? i0 : i == 1 ? i1 : i == 2 ? i2 : i3;
	  rtx set = single_set (insn);
	  rtx src;
	  if (!set)
	    continue;
	  src = SET_SRC (set);
	  if (CONSTANT_P (src))
	    {
	      ngood += 2;
	      break;
	    }
	  else if (BINARY_P (src) && CONSTANT_P (XEXP (src, 1)))
	    ngood++;
	  else if (GET_CODE (src) == ASHIFT || GET_CODE (src) == ASHIFTRT
		   || GET_CODE (src) == LSHIFTRT)
	    nshift++;
	}

      /* If I0 loads a memory and I3 sets the same memory, then I1 and I2
	 are likely manipulating its value.  Ideally we'll be able to combine
	 all four insns into a bitfield insertion of some kind.

	 Note the source in I0 might be inside a sign/zero extension and the
	 memory modes in I0 and I3 might be different.  So extract the address
	 from the destination of I3 and search for it in the source of I0.

	 In the event that there's a match but the source/dest do not actually
	 refer to the same memory, the worst that happens is we try some
	 combinations that we wouldn't have otherwise.  */
      if ((set0 = single_set (i0))
	  /* Ensure the source of SET0 is a MEM, possibly buried inside
	     an extension.  */
	  && (GET_CODE (SET_SRC (set0)) == MEM
	      || ((GET_CODE (SET_SRC (set0)) == ZERO_EXTEND
		   || GET_CODE (SET_SRC (set0)) == SIGN_EXTEND)
		  && GET_CODE (XEXP (SET_SRC (set0), 0)) == MEM))
	  && (set3 = single_set (i3))
	  /* Ensure the destination of SET3 is a MEM.  */
	  && GET_CODE (SET_DEST (set3)) == MEM
	  /* Would it be better to extract the base address for the MEM
	     in SET3 and look for that?  I don't have cases where it matters
	     but I could envision such cases.  */
	  && rtx_referenced_p (XEXP (SET_DEST (set3), 0), SET_SRC (set0)))
	ngood += 2;

      if (ngood < 2 && nshift < 2)
	return 0;
    }
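
  /* For illustration (hypothetical counts): a single constant load
     anywhere in the four insns, e.g.
	(set (reg:SI 100) (const_int 255))
     immediately gives ngood == 2 and the attempt proceeds, while four
     plain register-to-register copies leave ngood == 0 and nshift == 0,
     so the expensive four-insn combination is skipped.  */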

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (CALL_P (i2)
      || (i1 && CALL_P (i1))
      || (i0 && CALL_P (i0))
      || cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      || (i0 && cant_combine_insn_p (i0))
      || likely_spilled_retval_p (i3))
    return 0;

  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (i0)
	fprintf (dump_file, "\nTrying %d, %d, %d -> %d:\n",
		 INSN_UID (i0), INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
      else if (i1)
	fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
		 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
      else
	fprintf (dump_file, "\nTrying %d -> %d:\n",
		 INSN_UID (i2), INSN_UID (i3));

      if (i0)
	dump_insn_slim (dump_file, i0);
      if (i1)
	dump_insn_slim (dump_file, i1);
      dump_insn_slim (dump_file, i2);
      dump_insn_slim (dump_file, i3);
    }

  /* If multiple insns feed into one of I2 or I3, they can be in any
     order.  To simplify the code below, reorder them in sequence.  */
  if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i2))
    std::swap (i0, i2);
  if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i1))
    std::swap (i0, i1);
  if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
    std::swap (i1, i2);

  added_links_insn = 0;
  added_notes_insn = 0;

  /* First check for one important special case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also cases
     where I2 has a number of CLOBBERs inside the PARALLEL.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
      && REG_P (SET_SRC (PATTERN (i3)))
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_active_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.

	 Neither can this PARALLEL be an asm.  We do not allow combining
	 that usually (see can_combine_p), so do not here either.  */
      bool ok = true;
      for (i = 0; ok && i < XVECLEN (p2, 0); i++)
	{
	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	      && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					  SET_DEST (XVECEXP (p2, 0, i))))
	    ok = false;
	  else if (GET_CODE (XVECEXP (p2, 0, i)) == SET
		   && GET_CODE (SET_SRC (XVECEXP (p2, 0, i))) == ASM_OPERANDS)
	    ok = false;
	}

      if (ok)
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_luid = DF_INSN_LUID (i2);

	      added_sets_2 = added_sets_1 = added_sets_0 = 0;
	      i2src = SET_SRC (XVECEXP (p2, 0, i));
	      i2dest = SET_DEST (XVECEXP (p2, 0, i));
	      i2dest_killed = dead_or_set_p (i2, i2dest);

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we validate
		 the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)), SET_DEST (PATTERN (i3)));
	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }

  /* If I2 is setting a pseudo to a constant and I3 is setting some
     sub-part of it to another constant, merge them by making a new
     constant.  */
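
  /* For illustration (hypothetical values): if I2 is
	(set (reg:SI 100) (const_int 0x12345678))
     and I3 is
	(set (strict_low_part (subreg:HI (reg:SI 100) 0)) (const_int 0x0bcd))
     then on a little-endian target the wi::insert below folds the two
     into a single constant, and the merged pattern becomes a plain
	(set (reg:SI 100) (const_int 0x12340bcd))
     which replaces I3.  */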
  if (i1 == 0
      && (temp_expr = single_set (i2)) != 0
      && is_a <scalar_int_mode> (GET_MODE (SET_DEST (temp_expr)), &temp_mode)
      && CONST_SCALAR_INT_P (SET_SRC (temp_expr))
      && GET_CODE (PATTERN (i3)) == SET
      && CONST_SCALAR_INT_P (SET_SRC (PATTERN (i3)))
      && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp_expr)))
    {
      rtx dest = SET_DEST (PATTERN (i3));
      rtx temp_dest = SET_DEST (temp_expr);
      int offset = -1;
      int width = 0;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	{
	  if (CONST_INT_P (XEXP (dest, 1))
	      && CONST_INT_P (XEXP (dest, 2))
	      && is_a <scalar_int_mode> (GET_MODE (XEXP (dest, 0)),
					 &dest_mode))
	    {
	      width = INTVAL (XEXP (dest, 1));
	      offset = INTVAL (XEXP (dest, 2));
	      dest = XEXP (dest, 0);
	      if (BITS_BIG_ENDIAN)
		offset = GET_MODE_PRECISION (dest_mode) - width - offset;
	    }
	}
      else
	{
	  if (GET_CODE (dest) == STRICT_LOW_PART)
	    dest = XEXP (dest, 0);
	  if (is_a <scalar_int_mode> (GET_MODE (dest), &dest_mode))
	    {
	      width = GET_MODE_PRECISION (dest_mode);
	      offset = 0;
	    }
	}

      if (offset >= 0)
	{
	  /* If this is the low part, we're done.  */
	  if (subreg_lowpart_p (dest))
	    ;
	  /* Handle the case where inner is twice the size of outer.  */
	  else if (GET_MODE_PRECISION (temp_mode)
		   == 2 * GET_MODE_PRECISION (dest_mode))
	    offset += GET_MODE_PRECISION (dest_mode);
	  /* Otherwise give up for now.  */
	  else
	    offset = -1;
	}

      if (offset >= 0)
	{
	  rtx inner = SET_SRC (PATTERN (i3));
	  rtx outer = SET_SRC (temp_expr);

	  wide_int o = wi::insert (rtx_mode_t (outer, temp_mode),
				   rtx_mode_t (inner, dest_mode),
				   offset, width);

	  combine_merges++;
	  subst_insn = i3;
	  subst_low_luid = DF_INSN_LUID (i2);
	  added_sets_2 = added_sets_1 = added_sets_0 = 0;
	  i2dest = temp_dest;
	  i2dest_killed = dead_or_set_p (i2, i2dest);

	  /* Replace the source in I2 with the new constant and make the
	     resulting insn the new pattern for I3.  Then skip to where we
	     validate the pattern.  Everything was set up above.  */
	  SUBST (SET_SRC (temp_expr),
		 immed_wide_int_const (o, temp_mode));

	  newpat = PATTERN (i2);

	  /* The dest of I3 has been replaced with the dest of I2.  */
	  changed_i3_dest = 1;
	  goto validate_replacement;
	}
    }

  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (!HAVE_cc0 && i1 == 0
      && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1)))
      && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
      && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3))
    {
      /* We make I1 with the same INSN_UID as I2.  This gives it
	 the same DF_INSN_LUID for value tracking.  Our fake I1 will
	 never appear in the insn stream so giving it the same INSN_UID
	 as I2 will not cause a problem.  */

      i1 = gen_rtx_INSN (VOIDmode, NULL, i2, BLOCK_FOR_INSN (i2),
			 XVECEXP (PATTERN (i2), 0, 1), INSN_LOCATION (i2),
			 -1, NULL_RTX);
      INSN_UID (i1) = INSN_UID (i2);

      SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
      SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
	     SET_DEST (PATTERN (i1)));
      unsigned int regno = REGNO (SET_DEST (PATTERN (i1)));
      SUBST_LINK (LOG_LINKS (i2),
		  alloc_insn_link (i1, regno, LOG_LINKS (i2)));
    }

  /* If I2 is a PARALLEL of two SETs of REGs (and perhaps some CLOBBERs),
     make those two SETs separate I1 and I2 insns, and make an I0 that is
     the original I1.  */
  if (!HAVE_cc0 && i0 == 0
      && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
      && can_split_parallel_of_n_reg_sets (i2, 2)
      && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
      && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3)
      && !reg_set_between_p  (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
      && !reg_set_between_p  (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3))
    {
      /* If there is no I1, there is no I0 either.  */
      i0 = i1;

      /* We make I1 with the same INSN_UID as I2.  This gives it
	 the same DF_INSN_LUID for value tracking.  Our fake I1 will
	 never appear in the insn stream so giving it the same INSN_UID
	 as I2 will not cause a problem.  */

      i1 = gen_rtx_INSN (VOIDmode, NULL, i2, BLOCK_FOR_INSN (i2),
			 XVECEXP (PATTERN (i2), 0, 0), INSN_LOCATION (i2),
			 -1, NULL_RTX);
      INSN_UID (i1) = INSN_UID (i2);

      SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 1));
    }

  /* Verify that I2 and maybe I1 and I0 can be combined into I3.  */
  if (!can_combine_p (i2, i3, i0, i1, NULL, NULL, &i2dest, &i2src))
    {
      if (dump_file)
	fprintf (dump_file, "Can't combine i2 into i3\n");
      undo_all ();
      return 0;
    }
  if (i1 && !can_combine_p (i1, i3, i0, NULL, i2, NULL, &i1dest, &i1src))
    {
      if (dump_file)
	fprintf (dump_file, "Can't combine i1 into i3\n");
      undo_all ();
      return 0;
    }
  if (i0 && !can_combine_p (i0, i3, NULL, NULL, i1, i2, &i0dest, &i0src))
    {
      if (dump_file)
	fprintf (dump_file, "Can't combine i0 into i3\n");
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
  i0dest_in_i0src = i0 && reg_overlap_mentioned_p (i0dest, i0src);
  i1dest_in_i0src = i0 && reg_overlap_mentioned_p (i1dest, i0src);
  i2dest_in_i0src = i0 && reg_overlap_mentioned_p (i2dest, i0src);
  i2dest_killed = dead_or_set_p (i2, i2dest);
  i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
  i0dest_killed = i0 && dead_or_set_p (i0, i0dest);

  /* For the earlier insns, determine which of the subsequent ones they
     feed.  */
  i1_feeds_i2_n = i1 && insn_a_feeds_b (i1, i2);
  i0_feeds_i1_n = i0 && insn_a_feeds_b (i0, i1);
  i0_feeds_i2_n = (i0 && (!i0_feeds_i1_n ? insn_a_feeds_b (i0, i2)
			  : (!reg_overlap_mentioned_p (i1dest, i0dest)
			     && reg_overlap_mentioned_p (i0dest, i2src))));

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, i0dest,
			  i1 && i2dest_in_i1src && !i1_feeds_i2_n,
			  i0 && ((i2dest_in_i0src && !i0_feeds_i2_n)
				 || (i1dest_in_i0src && !i0_feeds_i1_n)),
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (i0 != 0 && GET_CODE (i0src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& REG_P (SET_SRC (PATTERN (i3)))
	&& MEM_P (SET_DEST (PATTERN (i3)))
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
    if (AUTO_INC_DEC)
      {
	rtx link;
	for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
	  if (REG_NOTE_KIND (link) == REG_INC
	      && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
		  || (i1 != 0
		      && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	    {
	      undo_all ();
	      return 0;
	    }
      }

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SET in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases: if I1 and I2 independently feed
     into I3, the set in I1 needs to be kept around unless I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  The same considerations apply to I0.  */
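
  /* For illustration: if I1 feeds I2 which feeds I3, and I1DEST dies in
     I2 (a REG_DEAD note there), its value is not needed past I3, so
     added_sets_1 stays 0 and no extra SET of I1DEST is kept.  If
     instead I1 and I2 both feed I3 directly and I1DEST neither dies
     nor is set in I3, added_sets_1 becomes 1 and the merged pattern
     must also set I1DEST.  */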

  added_sets_2 = !dead_or_set_p (i3, i2dest);

  if (i1)
    added_sets_1 = !(dead_or_set_p (i3, i1dest)
		     || (i1_feeds_i2_n && dead_or_set_p (i2, i1dest)));
  else
    added_sets_1 = 0;

  if (i0)
    added_sets_0 = !(dead_or_set_p (i3, i0dest)
		     || (i0_feeds_i1_n && dead_or_set_p (i1, i0dest))
		     || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
			 && dead_or_set_p (i2, i0dest)));
  else
    added_sets_0 = 0;

  /* We are about to copy insns for the case where they need to be kept
     around.  Check that they can be copied in the merged instruction.  */

  if (targetm.cannot_copy_insn_p
      && ((added_sets_2 && targetm.cannot_copy_insn_p (i2))
	  || (i1 && added_sets_1 && targetm.cannot_copy_insn_p (i1))
	  || (i0 && added_sets_0 && targetm.cannot_copy_insn_p (i0))))
    {
      undo_all ();
      return 0;
    }

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  if (added_sets_2)
    {
      if (GET_CODE (PATTERN (i2)) == PARALLEL)
	i2pat = gen_rtx_SET (i2dest, copy_rtx (i2src));
      else
	i2pat = copy_rtx (PATTERN (i2));
    }

  if (added_sets_1)
    {
      if (GET_CODE (PATTERN (i1)) == PARALLEL)
	i1pat = gen_rtx_SET (i1dest, copy_rtx (i1src));
      else
	i1pat = copy_rtx (PATTERN (i1));
    }

  if (added_sets_0)
    {
      if (GET_CODE (PATTERN (i0)) == PARALLEL)
	i0pat = gen_rtx_SET (i0dest, copy_rtx (i0src));
      else
	i0pat = copy_rtx (PATTERN (i0));
    }

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
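
  /* For illustration (mode and register numbers hypothetical): the
     pattern being aimed for looks like
	(parallel [(set (reg:CCZ 17)
			(compare:CCZ (plus:SI (reg:SI 100) (reg:SI 101))
				     (const_int 0)))
		   (set (reg:SI 102) (plus:SI (reg:SI 100) (reg:SI 101)))])
     where I2 computes the PLUS into reg 102 and I3 compares reg 102
     against zero; replacing I2DEST in I3 with I2SRC yields the COMPARE
     half, and the kept I2 SET supplies the other half.  */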

  if (!HAVE_cc0 && i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && CONST_INT_P (XEXP (SET_SRC (PATTERN (i3)), 1))
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx newpat_dest;
      rtx *cc_use_loc = NULL;
      rtx_insn *cc_use_insn = NULL;
      rtx op0 = i2src, op1 = XEXP (SET_SRC (PATTERN (i3)), 1);
      machine_mode compare_mode, orig_compare_mode;
      enum rtx_code compare_code = UNKNOWN, orig_compare_code = UNKNOWN;
      scalar_int_mode mode;

      newpat = PATTERN (i3);
      newpat_dest = SET_DEST (newpat);
      compare_mode = orig_compare_mode = GET_MODE (newpat_dest);

      if (undobuf.other_insn == 0
	  && (cc_use_loc = find_single_use (SET_DEST (newpat), i3,
					    &cc_use_insn)))
	{
	  compare_code = orig_compare_code = GET_CODE (*cc_use_loc);
	  if (is_a <scalar_int_mode> (GET_MODE (i2dest), &mode))
	    compare_code = simplify_compare_const (compare_code, mode,
						   op0, &op1);
	  target_canonicalize_comparison (&compare_code, &op0, &op1, 1);
	}

      /* Do the rest only if op1 is const0_rtx, which may be the
	 result of simplification.  */
      if (op1 == const0_rtx)
	{
	  /* If a single use of the CC is found, prepare to modify it
	     when SELECT_CC_MODE returns a new CC-class mode, or when
	     the above simplify_compare_const() returned a new comparison
	     operator.  undobuf.other_insn is assigned the CC use insn
	     when modifying it.  */
	  if (cc_use_loc)
	    {
#ifdef SELECT_CC_MODE
	      machine_mode new_mode
		= SELECT_CC_MODE (compare_code, op0, op1);
	      if (new_mode != orig_compare_mode
		  && can_change_dest_mode (SET_DEST (newpat),
					   added_sets_2, new_mode))
		{
		  unsigned int regno = REGNO (newpat_dest);
		  compare_mode = new_mode;
		  if (regno < FIRST_PSEUDO_REGISTER)
		    newpat_dest = gen_rtx_REG (compare_mode, regno);
		  else
		    {
		      SUBST_MODE (regno_reg_rtx[regno], compare_mode);
		      newpat_dest = regno_reg_rtx[regno];
		    }
		}
#endif
	      /* Cases for modifying the CC-using comparison.  */
	      if (compare_code != orig_compare_code
		  /* ??? Do we need to verify the zero rtx?  */
		  && XEXP (*cc_use_loc, 1) == const0_rtx)
		{
		  /* Replace cc_use_loc with entire new RTX.  */
		  SUBST (*cc_use_loc,
			 gen_rtx_fmt_ee (compare_code, compare_mode,
					 newpat_dest, const0_rtx));
		  undobuf.other_insn = cc_use_insn;
		}
	      else if (compare_mode != orig_compare_mode)
		{
		  /* Just replace the CC reg with a new mode.  */
		  SUBST (XEXP (*cc_use_loc, 0), newpat_dest);
		  undobuf.other_insn = cc_use_insn;
		}
	    }

	  /* Now we modify the current newpat:
	     First, SET_DEST (newpat) is updated if the CC mode has been
	     altered.  For targets without SELECT_CC_MODE, this should be
	     optimized away.  */
	  if (compare_mode != orig_compare_mode)
	    SUBST (SET_DEST (newpat), newpat_dest);
	  /* This is always done to propagate i2src into newpat.  */
	  SUBST (SET_SRC (newpat),
		 gen_rtx_COMPARE (compare_mode, op0, op1));
	  /* Create new version of i2pat if needed; the below PARALLEL
	     creation needs this to work correctly.  */
	  if (! rtx_equal_p (i2src, op0))
	    i2pat = gen_rtx_SET (i2dest, op0);
	  i2_is_used = 1;
	}
    }

  if (i2_is_used == 0)
    {
      /* It is possible that the source of I2 or I1 may be performing
	 an unneeded operation, such as a ZERO_EXTEND of something
	 that is known to have the high part zero.  Handle that case
	 by letting subst look at the inner insns.

	 Another way to do this would be to have a function that tries
	 to simplify a single insn instead of merging two or more
	 insns.  We don't do this because of the potential of infinite
	 loops and because of the potential extra memory required.
	 However, doing it the way we are is a bit of a kludge and
	 doesn't catch all cases.

	 But only do this if -fexpensive-optimizations since it slows
	 things down and doesn't usually win.

	 This is not done in the COMPARE case above because the
	 unmodified I2PAT is used in the PARALLEL and so a pattern
	 with a modified I2SRC would not match.  */

      if (flag_expensive_optimizations)
	{
	  /* Pass pc_rtx so no substitutions are done, just
	     simplifications.  */
	  if (i1)
	    {
	      subst_low_luid = DF_INSN_LUID (i1);
	      i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0, 0);
	    }

	  subst_low_luid = DF_INSN_LUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0, 0);
	}

      n_occurrences = 0;		/* `subst' counts here */
      subst_low_luid = DF_INSN_LUID (i2);

      /* If I1 feeds into I2 and I1DEST is in I1SRC, we need to make a unique
	 copy of I2SRC each time we substitute it, in order to avoid creating
	 self-referential RTL when we will be substituting I1SRC for I1DEST
	 later.  Likewise if I0 feeds into I2, either directly or indirectly
	 through I1, and I0DEST is in I0SRC.  */
      newpat = subst (PATTERN (i3), i2dest, i2src, 0, 0,
		      (i1_feeds_i2_n && i1dest_in_i1src)
		      || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
			  && i0dest_in_i0src));
      substed_i2 = 1;

      /* Record whether I2's body now appears within I3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise, try to
     substitute I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Check that an autoincrement side-effect on I1 has not been lost.
	 This happens if I1DEST is mentioned in I2 and dies there, and
	 has disappeared from the new pattern.  */
      if ((FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	   && i1_feeds_i2_n
	   && dead_or_set_p (i2, i1dest)
	   && !reg_overlap_mentioned_p (i1dest, newpat))
	  /* Before we can do this substitution, we must redo the test done
	     above (see detailed comments there) that ensures I1DEST isn't
	     mentioned in any SETs in NEWPAT that are field assignments.  */
	  || !combinable_i3pat (NULL, &newpat, i1dest, NULL_RTX, NULL_RTX,
				0, 0, 0))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_luid = DF_INSN_LUID (i1);

      /* If the following substitution will modify I1SRC, make a copy of it
	 for the case where it is substituted for I1DEST in I2PAT later.  */
      if (added_sets_2 && i1_feeds_i2_n)
	i1src_copy = copy_rtx (i1src);

      /* If I0 feeds into I1 and I0DEST is in I0SRC, we need to make a unique
	 copy of I1SRC each time we substitute it, in order to avoid creating
	 self-referential RTL when we will be substituting I0SRC for I0DEST
	 later.  */
      newpat = subst (newpat, i1dest, i1src, 0, 0,
		      i0_feeds_i1_n && i0dest_in_i0src);
      substed_i1 = 1;

      /* Record whether I1's body now appears within I3's body.  */
      i1_is_used = n_occurrences;
    }

  /* Likewise for I0 if we have it.  */

  if (i0 && GET_CODE (newpat) != CLOBBER)
    {
      if ((FIND_REG_INC_NOTE (i0, NULL_RTX) != 0
	   && ((i0_feeds_i2_n && dead_or_set_p (i2, i0dest))
	       || (i0_feeds_i1_n && dead_or_set_p (i1, i0dest)))
	   && !reg_overlap_mentioned_p (i0dest, newpat))
	  || !combinable_i3pat (NULL, &newpat, i0dest, NULL_RTX, NULL_RTX,
				0, 0, 0))
	{
	  undo_all ();
	  return 0;
	}

      /* If the following substitution will modify I0SRC, make a copy of it
	 for the case where it is substituted for I0DEST in I1PAT later.  */
      if (added_sets_1 && i0_feeds_i1_n)
	i0src_copy = copy_rtx (i0src);
      /* And a copy for I0DEST in I2PAT substitution.  */
      if (added_sets_2 && ((i0_feeds_i1_n && i1_feeds_i2_n)
			   || (i0_feeds_i2_n)))
	i0src_copy2 = copy_rtx (i0src);

      n_occurrences = 0;
343738fd1498Szrj       n_occurrences = 0;
343838fd1498Szrj       subst_low_luid = DF_INSN_LUID (i0);
343938fd1498Szrj       newpat = subst (newpat, i0dest, i0src, 0, 0, 0);
344038fd1498Szrj       substed_i0 = 1;
344138fd1498Szrj     }
344238fd1498Szrj 
344338fd1498Szrj   /* Fail if an autoincrement side-effect has been duplicated.  Be careful
344438fd1498Szrj      to count all the ways that I2SRC and I1SRC can be used.  */
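  /* E.g. (illustrative): if I2SRC contains (post_inc:SI (reg 100)) and it
     now appears both in NEWPAT and in an I2PAT kept in a PARALLEL, the
     increment would be performed twice.  */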
344538fd1498Szrj   if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
344638fd1498Szrj        && i2_is_used + added_sets_2 > 1)
344738fd1498Szrj       || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
344838fd1498Szrj 	  && (i1_is_used + added_sets_1 + (added_sets_2 && i1_feeds_i2_n)
344938fd1498Szrj 	      > 1))
345038fd1498Szrj       || (i0 != 0 && FIND_REG_INC_NOTE (i0, NULL_RTX) != 0
345138fd1498Szrj 	  && (n_occurrences + added_sets_0
345238fd1498Szrj 	      + (added_sets_1 && i0_feeds_i1_n)
345338fd1498Szrj 	      + (added_sets_2 && i0_feeds_i2_n)
345438fd1498Szrj 	      > 1))
345538fd1498Szrj       /* Fail if we tried to make a new register.  */
345638fd1498Szrj       || max_reg_num () != maxreg
345738fd1498Szrj       /* Fail if we couldn't do something and have a CLOBBER.  */
345838fd1498Szrj       || GET_CODE (newpat) == CLOBBER
345938fd1498Szrj       /* Fail if this new pattern is a MULT and we didn't have one before
346038fd1498Szrj 	 at the outer level.  */
346138fd1498Szrj       || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
346238fd1498Szrj 	  && ! have_mult))
346338fd1498Szrj     {
346438fd1498Szrj       undo_all ();
346538fd1498Szrj       return 0;
346638fd1498Szrj     }
346738fd1498Szrj 
346838fd1498Szrj   /* If the actions of the earlier insns must be kept
346938fd1498Szrj      in addition to substituting them into the latest one,
347038fd1498Szrj      we must make a new PARALLEL for the latest insn
347138fd1498Szrj      to hold the additional SETs.  */
347238fd1498Szrj 
347338fd1498Szrj   if (added_sets_0 || added_sets_1 || added_sets_2)
347438fd1498Szrj     {
347538fd1498Szrj       int extra_sets = added_sets_0 + added_sets_1 + added_sets_2;
347638fd1498Szrj       combine_extras++;
347738fd1498Szrj 
347838fd1498Szrj       if (GET_CODE (newpat) == PARALLEL)
347938fd1498Szrj 	{
348038fd1498Szrj 	  rtvec old = XVEC (newpat, 0);
348138fd1498Szrj 	  total_sets = XVECLEN (newpat, 0) + extra_sets;
348238fd1498Szrj 	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
348338fd1498Szrj 	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
348438fd1498Szrj 		  sizeof (old->elem[0]) * old->num_elem);
348538fd1498Szrj 	}
348638fd1498Szrj       else
348738fd1498Szrj 	{
348838fd1498Szrj 	  rtx old = newpat;
348938fd1498Szrj 	  total_sets = 1 + extra_sets;
349038fd1498Szrj 	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
349138fd1498Szrj 	  XVECEXP (newpat, 0, 0) = old;
349238fd1498Szrj 	}
349338fd1498Szrj 
349438fd1498Szrj       if (added_sets_0)
349538fd1498Szrj 	XVECEXP (newpat, 0, --total_sets) = i0pat;
349638fd1498Szrj 
349738fd1498Szrj       if (added_sets_1)
349838fd1498Szrj 	{
349938fd1498Szrj 	  rtx t = i1pat;
350038fd1498Szrj 	  if (i0_feeds_i1_n)
350138fd1498Szrj 	    t = subst (t, i0dest, i0src_copy ? i0src_copy : i0src, 0, 0, 0);
350238fd1498Szrj 
350338fd1498Szrj 	  XVECEXP (newpat, 0, --total_sets) = t;
350438fd1498Szrj 	}
350538fd1498Szrj       if (added_sets_2)
350638fd1498Szrj 	{
350738fd1498Szrj 	  rtx t = i2pat;
350838fd1498Szrj 	  if (i1_feeds_i2_n)
350938fd1498Szrj 	    t = subst (t, i1dest, i1src_copy ? i1src_copy : i1src, 0, 0,
351038fd1498Szrj 		       i0_feeds_i1_n && i0dest_in_i0src);
351138fd1498Szrj 	  if ((i0_feeds_i1_n && i1_feeds_i2_n) || i0_feeds_i2_n)
351238fd1498Szrj 	    t = subst (t, i0dest, i0src_copy2 ? i0src_copy2 : i0src, 0, 0, 0);
351338fd1498Szrj 
351438fd1498Szrj 	  XVECEXP (newpat, 0, --total_sets) = t;
351538fd1498Szrj 	}
351638fd1498Szrj     }
351738fd1498Szrj 
351838fd1498Szrj  validate_replacement:
351938fd1498Szrj 
352038fd1498Szrj   /* Note which hard regs this insn has as inputs.  */
352138fd1498Szrj   mark_used_regs_combine (newpat);
352238fd1498Szrj 
352338fd1498Szrj   /* If recog_for_combine fails, it strips existing clobbers.  If we'll
352438fd1498Szrj      consider splitting this pattern, we might need these clobbers.  */
352538fd1498Szrj   if (i1 && GET_CODE (newpat) == PARALLEL
352638fd1498Szrj       && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
352738fd1498Szrj     {
352838fd1498Szrj       int len = XVECLEN (newpat, 0);
352938fd1498Szrj 
353038fd1498Szrj       newpat_vec_with_clobbers = rtvec_alloc (len);
353138fd1498Szrj       for (i = 0; i < len; i++)
353238fd1498Szrj 	RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
353338fd1498Szrj     }
353438fd1498Szrj 
353538fd1498Szrj   /* We have recognized nothing yet.  */
353638fd1498Szrj   insn_code_number = -1;
353738fd1498Szrj 
353838fd1498Szrj   /* See if this is a PARALLEL of two SETs where one SET's destination is
353938fd1498Szrj      a register that is unused and this isn't marked as an instruction that
354038fd1498Szrj      might trap in an EH region.  In that case, we just need the other SET.
354138fd1498Szrj      We prefer this over the PARALLEL.
354238fd1498Szrj 
354338fd1498Szrj      This can occur when simplifying a divmod insn.  We *must* test for this
354438fd1498Szrj      case here because the code below that splits two independent SETs doesn't
354538fd1498Szrj      handle this case correctly when it updates the register status.
354638fd1498Szrj 
354738fd1498Szrj      It's pointless doing this if we originally had two sets, one from
354838fd1498Szrj      i3, and one from i2.  Combining then splitting the parallel results
354938fd1498Szrj      in the original i2 again plus an invalid insn (which we delete).
355038fd1498Szrj      The net effect is only to move instructions around, which makes
355138fd1498Szrj      debug info less accurate.
355238fd1498Szrj 
355338fd1498Szrj      If the remaining SET came from I2, its destination should not be used
355438fd1498Szrj      between I2 and I3.  See PR82024.  */
355538fd1498Szrj 
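  /* Shape of a typical divmod case (hypothetical registers):
	 (parallel [(set (reg:SI 100) (div:SI (reg:SI 102) (reg:SI 103)))
		    (set (reg:SI 101) (mod:SI (reg:SI 102) (reg:SI 103)))])
     where (reg:SI 101) is noted REG_UNUSED in I3; only the DIV set need
     be kept.  */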
355638fd1498Szrj   if (!(added_sets_2 && i1 == 0)
355738fd1498Szrj       && is_parallel_of_n_reg_sets (newpat, 2)
355838fd1498Szrj       && asm_noperands (newpat) < 0)
355938fd1498Szrj     {
356038fd1498Szrj       rtx set0 = XVECEXP (newpat, 0, 0);
356138fd1498Szrj       rtx set1 = XVECEXP (newpat, 0, 1);
356238fd1498Szrj       rtx oldpat = newpat;
356338fd1498Szrj 
356438fd1498Szrj       if (((REG_P (SET_DEST (set1))
356538fd1498Szrj 	    && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
356638fd1498Szrj 	   || (GET_CODE (SET_DEST (set1)) == SUBREG
356738fd1498Szrj 	       && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
356838fd1498Szrj 	  && insn_nothrow_p (i3)
356938fd1498Szrj 	  && !side_effects_p (SET_SRC (set1)))
357038fd1498Szrj 	{
357138fd1498Szrj 	  newpat = set0;
357238fd1498Szrj 	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
357338fd1498Szrj 	}
357438fd1498Szrj 
357538fd1498Szrj       else if (((REG_P (SET_DEST (set0))
357638fd1498Szrj 		 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
357738fd1498Szrj 		|| (GET_CODE (SET_DEST (set0)) == SUBREG
357838fd1498Szrj 		    && find_reg_note (i3, REG_UNUSED,
357938fd1498Szrj 				      SUBREG_REG (SET_DEST (set0)))))
358038fd1498Szrj 	       && insn_nothrow_p (i3)
358138fd1498Szrj 	       && !side_effects_p (SET_SRC (set0)))
358238fd1498Szrj 	{
358338fd1498Szrj 	  rtx dest = SET_DEST (set1);
358438fd1498Szrj 	  if (GET_CODE (dest) == SUBREG)
358538fd1498Szrj 	    dest = SUBREG_REG (dest);
358638fd1498Szrj 	  if (!reg_used_between_p (dest, i2, i3))
358738fd1498Szrj 	    {
358838fd1498Szrj 	      newpat = set1;
358938fd1498Szrj 	      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
359038fd1498Szrj 
359138fd1498Szrj 	      if (insn_code_number >= 0)
359238fd1498Szrj 		changed_i3_dest = 1;
359338fd1498Szrj 	    }
359438fd1498Szrj 	}
359538fd1498Szrj 
359638fd1498Szrj       if (insn_code_number < 0)
359738fd1498Szrj 	newpat = oldpat;
359838fd1498Szrj     }
359938fd1498Szrj 
360038fd1498Szrj   /* Is the result of combination a valid instruction?  */
360138fd1498Szrj   if (insn_code_number < 0)
360238fd1498Szrj     insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
360338fd1498Szrj 
360438fd1498Szrj   /* If we were combining three insns and the result is a simple SET
360538fd1498Szrj      with no ASM_OPERANDS that wasn't recognized, try to split it into two
360638fd1498Szrj      insns.  There are two ways to do this.  It can be split using a
360738fd1498Szrj      machine-specific method (like when you have an addition of a large
360838fd1498Szrj      constant) or by combine in the function find_split_point.  */
360938fd1498Szrj 
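  /* Sketch of the machine-specific case (target-dependent, illustrative):
     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 0x12345))) may be
     split by the MD file into an insn loading the high part of the
     constant and an add of the low part, because the full constant is not
     a legal immediate operand.  */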
361038fd1498Szrj   if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
361138fd1498Szrj       && asm_noperands (newpat) < 0)
361238fd1498Szrj     {
361338fd1498Szrj       rtx parallel, *split;
361438fd1498Szrj       rtx_insn *m_split_insn;
361538fd1498Szrj 
361638fd1498Szrj       /* See if the MD file can split NEWPAT.  If it can't, see if letting it
361738fd1498Szrj 	 use I2DEST as a scratch register will help.  In the latter case,
361838fd1498Szrj 	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
361938fd1498Szrj 
362038fd1498Szrj       m_split_insn = combine_split_insns (newpat, i3);
362138fd1498Szrj 
362238fd1498Szrj       /* We can only use I2DEST as a scratch reg if it doesn't overlap any
362338fd1498Szrj 	 inputs of NEWPAT.  */
362438fd1498Szrj 
362538fd1498Szrj       /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
362638fd1498Szrj 	 possible to try that as a scratch reg.  This would require adding
362738fd1498Szrj 	 more code to make it work though.  */
362838fd1498Szrj 
362938fd1498Szrj       if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
363038fd1498Szrj 	{
363138fd1498Szrj 	  machine_mode new_mode = GET_MODE (SET_DEST (newpat));
363238fd1498Szrj 
363338fd1498Szrj 	  /* ??? Reusing i2dest without resetting the reg_stat entry for it
363438fd1498Szrj 	     (temporarily, until we are committed to this instruction
363538fd1498Szrj 	     combination) does not work: for example, any call to nonzero_bits
363638fd1498Szrj 	     on the register (from a splitter in the MD file, for example)
363738fd1498Szrj 	     will get the old information, which is invalid.
363838fd1498Szrj 
363938fd1498Szrj 	     Since nowadays we can create registers during combine just fine,
364038fd1498Szrj 	     we should just create a new one here, not reuse i2dest.  */
364138fd1498Szrj 
364238fd1498Szrj 	  /* First try to split using the original register as a
364338fd1498Szrj 	     scratch register.  */
364438fd1498Szrj 	  parallel = gen_rtx_PARALLEL (VOIDmode,
364538fd1498Szrj 				       gen_rtvec (2, newpat,
364638fd1498Szrj 						  gen_rtx_CLOBBER (VOIDmode,
364738fd1498Szrj 								   i2dest)));
364838fd1498Szrj 	  m_split_insn = combine_split_insns (parallel, i3);
364938fd1498Szrj 
365038fd1498Szrj 	  /* If that didn't work, try changing the mode of I2DEST if
365138fd1498Szrj 	     we can.  */
365238fd1498Szrj 	  if (m_split_insn == 0
365338fd1498Szrj 	      && new_mode != GET_MODE (i2dest)
365438fd1498Szrj 	      && new_mode != VOIDmode
365538fd1498Szrj 	      && can_change_dest_mode (i2dest, added_sets_2, new_mode))
365638fd1498Szrj 	    {
365738fd1498Szrj 	      machine_mode old_mode = GET_MODE (i2dest);
365838fd1498Szrj 	      rtx ni2dest;
365938fd1498Szrj 
366038fd1498Szrj 	      if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
366138fd1498Szrj 		ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
366238fd1498Szrj 	      else
366338fd1498Szrj 		{
366438fd1498Szrj 		  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
366538fd1498Szrj 		  ni2dest = regno_reg_rtx[REGNO (i2dest)];
366638fd1498Szrj 		}
366738fd1498Szrj 
366838fd1498Szrj 	      parallel = (gen_rtx_PARALLEL
366938fd1498Szrj 			  (VOIDmode,
367038fd1498Szrj 			   gen_rtvec (2, newpat,
367138fd1498Szrj 				      gen_rtx_CLOBBER (VOIDmode,
367238fd1498Szrj 						       ni2dest))));
367338fd1498Szrj 	      m_split_insn = combine_split_insns (parallel, i3);
367438fd1498Szrj 
367538fd1498Szrj 	      if (m_split_insn == 0
367638fd1498Szrj 		  && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
367738fd1498Szrj 		{
367838fd1498Szrj 		  struct undo *buf;
367938fd1498Szrj 
368038fd1498Szrj 		  adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
368138fd1498Szrj 		  buf = undobuf.undos;
368238fd1498Szrj 		  undobuf.undos = buf->next;
368338fd1498Szrj 		  buf->next = undobuf.frees;
368438fd1498Szrj 		  undobuf.frees = buf;
368538fd1498Szrj 		}
368638fd1498Szrj 	    }
368738fd1498Szrj 
368838fd1498Szrj 	  i2scratch = m_split_insn != 0;
368938fd1498Szrj 	}
369038fd1498Szrj 
369138fd1498Szrj       /* If recog_for_combine has discarded clobbers, try to use them
369238fd1498Szrj 	 again for the split.  */
369338fd1498Szrj       if (m_split_insn == 0 && newpat_vec_with_clobbers)
369438fd1498Szrj 	{
369538fd1498Szrj 	  parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
369638fd1498Szrj 	  m_split_insn = combine_split_insns (parallel, i3);
369738fd1498Szrj 	}
369838fd1498Szrj 
369938fd1498Szrj       if (m_split_insn && NEXT_INSN (m_split_insn) == NULL_RTX)
370038fd1498Szrj 	{
370138fd1498Szrj 	  rtx m_split_pat = PATTERN (m_split_insn);
370238fd1498Szrj 	  insn_code_number = recog_for_combine (&m_split_pat, i3, &new_i3_notes);
370338fd1498Szrj 	  if (insn_code_number >= 0)
370438fd1498Szrj 	    newpat = m_split_pat;
370538fd1498Szrj 	}
370638fd1498Szrj       else if (m_split_insn && NEXT_INSN (NEXT_INSN (m_split_insn)) == NULL_RTX
370738fd1498Szrj 	       && (next_nonnote_nondebug_insn (i2) == i3
370838fd1498Szrj 		   || !modified_between_p (PATTERN (m_split_insn), i2, i3)))
370938fd1498Szrj 	{
371038fd1498Szrj 	  rtx i2set, i3set;
371138fd1498Szrj 	  rtx newi3pat = PATTERN (NEXT_INSN (m_split_insn));
371238fd1498Szrj 	  newi2pat = PATTERN (m_split_insn);
371338fd1498Szrj 
371438fd1498Szrj 	  i3set = single_set (NEXT_INSN (m_split_insn));
371538fd1498Szrj 	  i2set = single_set (m_split_insn);
371638fd1498Szrj 
371738fd1498Szrj 	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
371838fd1498Szrj 
371938fd1498Szrj 	  /* If I2 or I3 has multiple SETs, we won't know how to track
372038fd1498Szrj 	     register status, so don't use these insns.  If I2's destination
372138fd1498Szrj 	     is used between I2 and I3, we also can't use these insns.  */
372238fd1498Szrj 
372338fd1498Szrj 	  if (i2_code_number >= 0 && i2set && i3set
372438fd1498Szrj 	      && (next_nonnote_nondebug_insn (i2) == i3
372538fd1498Szrj 		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
372638fd1498Szrj 	    insn_code_number = recog_for_combine (&newi3pat, i3,
372738fd1498Szrj 						  &new_i3_notes);
372838fd1498Szrj 	  if (insn_code_number >= 0)
372938fd1498Szrj 	    newpat = newi3pat;
373038fd1498Szrj 
373138fd1498Szrj 	  /* It is possible that both insns now set the destination of I3.
373238fd1498Szrj 	     If so, we must show an extra use of it.  */
373338fd1498Szrj 
373438fd1498Szrj 	  if (insn_code_number >= 0)
373538fd1498Szrj 	    {
373638fd1498Szrj 	      rtx new_i3_dest = SET_DEST (i3set);
373738fd1498Szrj 	      rtx new_i2_dest = SET_DEST (i2set);
373838fd1498Szrj 
373938fd1498Szrj 	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
374038fd1498Szrj 		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
374138fd1498Szrj 		     || GET_CODE (new_i3_dest) == SUBREG)
374238fd1498Szrj 		new_i3_dest = XEXP (new_i3_dest, 0);
374338fd1498Szrj 
374438fd1498Szrj 	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
374538fd1498Szrj 		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
374638fd1498Szrj 		     || GET_CODE (new_i2_dest) == SUBREG)
374738fd1498Szrj 		new_i2_dest = XEXP (new_i2_dest, 0);
374838fd1498Szrj 
374938fd1498Szrj 	      if (REG_P (new_i3_dest)
375038fd1498Szrj 		  && REG_P (new_i2_dest)
375138fd1498Szrj 		  && REGNO (new_i3_dest) == REGNO (new_i2_dest)
375238fd1498Szrj 		  && REGNO (new_i2_dest) < reg_n_sets_max)
375338fd1498Szrj 		INC_REG_N_SETS (REGNO (new_i2_dest), 1);
375438fd1498Szrj 	    }
375538fd1498Szrj 	}
375638fd1498Szrj 
375738fd1498Szrj       /* If we can split it and use I2DEST, go ahead and see if that
375838fd1498Szrj 	 helps things be recognized.  Verify that none of the registers
375938fd1498Szrj 	 are set between I2 and I3.  */
376038fd1498Szrj       if (insn_code_number < 0
376138fd1498Szrj           && (split = find_split_point (&newpat, i3, false)) != 0
376238fd1498Szrj 	  && (!HAVE_cc0 || REG_P (i2dest))
376338fd1498Szrj 	  /* We need I2DEST in the proper mode.  If it is a hard register
376438fd1498Szrj 	     or the only use of a pseudo, we can change its mode.
376538fd1498Szrj 	     Make sure we don't change a hard register to have a mode that
376638fd1498Szrj 	     isn't valid for it, or change the number of registers.  */
376738fd1498Szrj 	  && (GET_MODE (*split) == GET_MODE (i2dest)
376838fd1498Szrj 	      || GET_MODE (*split) == VOIDmode
376938fd1498Szrj 	      || can_change_dest_mode (i2dest, added_sets_2,
377038fd1498Szrj 				       GET_MODE (*split)))
377138fd1498Szrj 	  && (next_nonnote_nondebug_insn (i2) == i3
377238fd1498Szrj 	      || !modified_between_p (*split, i2, i3))
377338fd1498Szrj 	  /* We can't overwrite I2DEST if its value is still used by
377438fd1498Szrj 	     NEWPAT.  */
377538fd1498Szrj 	  && ! reg_referenced_p (i2dest, newpat))
377638fd1498Szrj 	{
377738fd1498Szrj 	  rtx newdest = i2dest;
377838fd1498Szrj 	  enum rtx_code split_code = GET_CODE (*split);
377938fd1498Szrj 	  machine_mode split_mode = GET_MODE (*split);
378038fd1498Szrj 	  bool subst_done = false;
378138fd1498Szrj 	  newi2pat = NULL_RTX;
378238fd1498Szrj 
378338fd1498Szrj 	  i2scratch = true;
378438fd1498Szrj 
378538fd1498Szrj 	  /* *SPLIT may be part of I2SRC, so make sure we have the
378638fd1498Szrj 	     original expression around for later debug processing.
378738fd1498Szrj 	     We should not need I2SRC any more in other cases.  */
378838fd1498Szrj 	  if (MAY_HAVE_DEBUG_BIND_INSNS)
378938fd1498Szrj 	    i2src = copy_rtx (i2src);
379038fd1498Szrj 	  else
379138fd1498Szrj 	    i2src = NULL;
379238fd1498Szrj 
379338fd1498Szrj 	  /* Get NEWDEST as a register in the proper mode.  We have already
379438fd1498Szrj 	     validated that we can do this.  */
379538fd1498Szrj 	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
379638fd1498Szrj 	    {
379738fd1498Szrj 	      if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
379838fd1498Szrj 		newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
379938fd1498Szrj 	      else
380038fd1498Szrj 		{
380138fd1498Szrj 		  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
380238fd1498Szrj 		  newdest = regno_reg_rtx[REGNO (i2dest)];
380338fd1498Szrj 		}
380438fd1498Szrj 	    }
380538fd1498Szrj 
380638fd1498Szrj 	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
380738fd1498Szrj 	     an ASHIFT.  This can occur if it was inside a PLUS and hence
380838fd1498Szrj 	     appeared to be a memory address.  This is a kludge.  */
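	  /* E.g. (illustrative): (mult:SI (reg:SI 100) (const_int 8)) found
	     inside a PLUS is rewritten as (ashift:SI (reg:SI 100)
	     (const_int 3)), since 8 == 1 << 3.  */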
380938fd1498Szrj 	  if (split_code == MULT
381038fd1498Szrj 	      && CONST_INT_P (XEXP (*split, 1))
381138fd1498Szrj 	      && INTVAL (XEXP (*split, 1)) > 0
381238fd1498Szrj 	      && (i = exact_log2 (UINTVAL (XEXP (*split, 1)))) >= 0)
381338fd1498Szrj 	    {
381438fd1498Szrj 	      rtx i_rtx = gen_int_shift_amount (split_mode, i);
381538fd1498Szrj 	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
381638fd1498Szrj 					     XEXP (*split, 0), i_rtx));
381738fd1498Szrj 	      /* Update split_code because we may not have a multiply
381838fd1498Szrj 		 anymore.  */
381938fd1498Szrj 	      split_code = GET_CODE (*split);
382038fd1498Szrj 	    }
382138fd1498Szrj 
382238fd1498Szrj 	  /* Similarly for (plus (mult FOO (const_int pow2))).  */
382338fd1498Szrj 	  if (split_code == PLUS
382438fd1498Szrj 	      && GET_CODE (XEXP (*split, 0)) == MULT
382538fd1498Szrj 	      && CONST_INT_P (XEXP (XEXP (*split, 0), 1))
382638fd1498Szrj 	      && INTVAL (XEXP (XEXP (*split, 0), 1)) > 0
382738fd1498Szrj 	      && (i = exact_log2 (UINTVAL (XEXP (XEXP (*split, 0), 1)))) >= 0)
382838fd1498Szrj 	    {
382938fd1498Szrj 	      rtx nsplit = XEXP (*split, 0);
383038fd1498Szrj 	      rtx i_rtx = gen_int_shift_amount (GET_MODE (nsplit), i);
383138fd1498Szrj 	      SUBST (XEXP (*split, 0), gen_rtx_ASHIFT (GET_MODE (nsplit),
383238fd1498Szrj 						       XEXP (nsplit, 0),
383338fd1498Szrj 						       i_rtx));
383438fd1498Szrj 	      /* Update split_code because we may not have a multiply
383538fd1498Szrj 		 anymore.  */
383638fd1498Szrj 	      split_code = GET_CODE (*split);
383738fd1498Szrj 	    }
383838fd1498Szrj 
383938fd1498Szrj #ifdef INSN_SCHEDULING
384038fd1498Szrj 	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
384138fd1498Szrj 	     be written as a ZERO_EXTEND.  */
384238fd1498Szrj 	  if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
384338fd1498Szrj 	    {
384438fd1498Szrj 	      /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
384538fd1498Szrj 		 what it really is.  */
384638fd1498Szrj 	      if (load_extend_op (GET_MODE (SUBREG_REG (*split)))
384738fd1498Szrj 		  == SIGN_EXTEND)
384838fd1498Szrj 		SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
384938fd1498Szrj 						    SUBREG_REG (*split)));
385038fd1498Szrj 	      else
385138fd1498Szrj 		SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
385238fd1498Szrj 						    SUBREG_REG (*split)));
385338fd1498Szrj 	    }
385438fd1498Szrj #endif
385538fd1498Szrj 
385638fd1498Szrj 	  /* Attempt to split binary operators using arithmetic identities.  */
385738fd1498Szrj 	  if (BINARY_P (SET_SRC (newpat))
385838fd1498Szrj 	      && split_mode == GET_MODE (SET_SRC (newpat))
385938fd1498Szrj 	      && ! side_effects_p (SET_SRC (newpat)))
386038fd1498Szrj 	    {
386138fd1498Szrj 	      rtx setsrc = SET_SRC (newpat);
386238fd1498Szrj 	      machine_mode mode = GET_MODE (setsrc);
386338fd1498Szrj 	      enum rtx_code code = GET_CODE (setsrc);
386438fd1498Szrj 	      rtx src_op0 = XEXP (setsrc, 0);
386538fd1498Szrj 	      rtx src_op1 = XEXP (setsrc, 1);
386638fd1498Szrj 
386738fd1498Szrj 	      /* Split "X = Y op Y" as "Z = Y; X = Z op Z".  */
386838fd1498Szrj 	      if (rtx_equal_p (src_op0, src_op1))
386938fd1498Szrj 		{
387038fd1498Szrj 		  newi2pat = gen_rtx_SET (newdest, src_op0);
387138fd1498Szrj 		  SUBST (XEXP (setsrc, 0), newdest);
387238fd1498Szrj 		  SUBST (XEXP (setsrc, 1), newdest);
387338fd1498Szrj 		  subst_done = true;
387438fd1498Szrj 		}
387538fd1498Szrj 	      /* Split "((P op Q) op R) op S" where op is PLUS or MULT.  */
387638fd1498Szrj 	      else if ((code == PLUS || code == MULT)
387738fd1498Szrj 		       && GET_CODE (src_op0) == code
387838fd1498Szrj 		       && GET_CODE (XEXP (src_op0, 0)) == code
387938fd1498Szrj 		       && (INTEGRAL_MODE_P (mode)
388038fd1498Szrj 			   || (FLOAT_MODE_P (mode)
388138fd1498Szrj 			       && flag_unsafe_math_optimizations)))
388238fd1498Szrj 		{
388338fd1498Szrj 		  rtx p = XEXP (XEXP (src_op0, 0), 0);
388438fd1498Szrj 		  rtx q = XEXP (XEXP (src_op0, 0), 1);
388538fd1498Szrj 		  rtx r = XEXP (src_op0, 1);
388638fd1498Szrj 		  rtx s = src_op1;
388738fd1498Szrj 
388838fd1498Szrj 		  /* Split both "((X op Y) op X) op Y" and
388938fd1498Szrj 		     "((X op Y) op Y) op X" as "T op T" where T is
389038fd1498Szrj 		     "X op Y".  */
389138fd1498Szrj 		  if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
389238fd1498Szrj 		       || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
389338fd1498Szrj 		    {
389438fd1498Szrj 		      newi2pat = gen_rtx_SET (newdest, XEXP (src_op0, 0));
389538fd1498Szrj 		      SUBST (XEXP (setsrc, 0), newdest);
389638fd1498Szrj 		      SUBST (XEXP (setsrc, 1), newdest);
389738fd1498Szrj 		      subst_done = true;
389838fd1498Szrj 		    }
389938fd1498Szrj 		  /* Split "((X op X) op Y) op Y" as "T op T" where
390038fd1498Szrj 		     T is "X op Y".  */
390138fd1498Szrj 		  else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
390238fd1498Szrj 		    {
390338fd1498Szrj 		      rtx tmp = simplify_gen_binary (code, mode, p, r);
390438fd1498Szrj 		      newi2pat = gen_rtx_SET (newdest, tmp);
390538fd1498Szrj 		      SUBST (XEXP (setsrc, 0), newdest);
390638fd1498Szrj 		      SUBST (XEXP (setsrc, 1), newdest);
390738fd1498Szrj 		      subst_done = true;
390838fd1498Szrj 		    }
390938fd1498Szrj 		}
391038fd1498Szrj 	    }
391138fd1498Szrj 
391238fd1498Szrj 	  if (!subst_done)
391338fd1498Szrj 	    {
391438fd1498Szrj 	      newi2pat = gen_rtx_SET (newdest, *split);
391538fd1498Szrj 	      SUBST (*split, newdest);
391638fd1498Szrj 	    }
391738fd1498Szrj 
391838fd1498Szrj 	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
391938fd1498Szrj 
392038fd1498Szrj 	  /* recog_for_combine might have added CLOBBERs to newi2pat.
392138fd1498Szrj 	     Make sure NEWPAT does not depend on the clobbered regs.  */
392238fd1498Szrj 	  if (GET_CODE (newi2pat) == PARALLEL)
392338fd1498Szrj 	    for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
392438fd1498Szrj 	      if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
392538fd1498Szrj 		{
392638fd1498Szrj 		  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
392738fd1498Szrj 		  if (reg_overlap_mentioned_p (reg, newpat))
392838fd1498Szrj 		    {
392938fd1498Szrj 		      undo_all ();
393038fd1498Szrj 		      return 0;
393138fd1498Szrj 		    }
393238fd1498Szrj 		}
393338fd1498Szrj 
393438fd1498Szrj 	  /* If the split point was a MULT and we didn't have one before,
393538fd1498Szrj 	     don't use one now.  */
393638fd1498Szrj 	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
393738fd1498Szrj 	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
393838fd1498Szrj 	}
393938fd1498Szrj     }
394038fd1498Szrj 
394138fd1498Szrj   /* Check for a case where we loaded from memory in a narrow mode and
394238fd1498Szrj      then sign extended it, but we need both registers.  In that case,
394338fd1498Szrj      we have a PARALLEL with both loads from the same memory location.
394438fd1498Szrj      We can split this into a load from memory followed by a register-register
394538fd1498Szrj      copy.  This saves at least one insn, more if register allocation can
394638fd1498Szrj      eliminate the copy.
394738fd1498Szrj 
394838fd1498Szrj      We cannot do this if the destination of the first assignment is a
394938fd1498Szrj      condition code register or cc0.  We eliminate this case by making sure
395038fd1498Szrj      the SET_DEST and SET_SRC have the same mode.
395138fd1498Szrj 
395238fd1498Szrj      We cannot do this if the destination of the second assignment is
395338fd1498Szrj      a register that we have already assumed is zero-extended.  Similarly
395438fd1498Szrj      for a SUBREG of such a register.  */
395538fd1498Szrj 
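  /* Illustrative shape (hypothetical registers) of the PARALLEL handled
     by the following case:
	 (parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI (reg:SI 103))))
		    (set (reg:HI 101) (mem:HI (reg:SI 103)))])
     which becomes the extension alone in I2, followed in I3 by a copy of
     the low part of (reg:SI 100) into (reg:HI 101).  */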
395638fd1498Szrj   else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
395738fd1498Szrj 	   && GET_CODE (newpat) == PARALLEL
395838fd1498Szrj 	   && XVECLEN (newpat, 0) == 2
395938fd1498Szrj 	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
396038fd1498Szrj 	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
396138fd1498Szrj 	   && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
396238fd1498Szrj 	       == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
396338fd1498Szrj 	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
396438fd1498Szrj 	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
396538fd1498Szrj 			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
396638fd1498Szrj 	   && !modified_between_p (SET_SRC (XVECEXP (newpat, 0, 1)), i2, i3)
396738fd1498Szrj 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
396838fd1498Szrj 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
396938fd1498Szrj 	   && ! (temp_expr = SET_DEST (XVECEXP (newpat, 0, 1)),
397038fd1498Szrj 		 (REG_P (temp_expr)
397138fd1498Szrj 		  && reg_stat[REGNO (temp_expr)].nonzero_bits != 0
397238fd1498Szrj 		  && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
397338fd1498Szrj 			       BITS_PER_WORD)
397438fd1498Szrj 		  && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
397538fd1498Szrj 			       HOST_BITS_PER_INT)
397638fd1498Szrj 		  && (reg_stat[REGNO (temp_expr)].nonzero_bits
397738fd1498Szrj 		      != GET_MODE_MASK (word_mode))))
397838fd1498Szrj 	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
397938fd1498Szrj 		 && (temp_expr = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
398038fd1498Szrj 		     (REG_P (temp_expr)
398138fd1498Szrj 		      && reg_stat[REGNO (temp_expr)].nonzero_bits != 0
398238fd1498Szrj 		      && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
398338fd1498Szrj 				   BITS_PER_WORD)
398438fd1498Szrj 		      && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
398538fd1498Szrj 				   HOST_BITS_PER_INT)
398638fd1498Szrj 		      && (reg_stat[REGNO (temp_expr)].nonzero_bits
398738fd1498Szrj 			  != GET_MODE_MASK (word_mode)))))
398838fd1498Szrj 	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
398938fd1498Szrj 					 SET_SRC (XVECEXP (newpat, 0, 1)))
399038fd1498Szrj 	   && ! find_reg_note (i3, REG_UNUSED,
399138fd1498Szrj 			       SET_DEST (XVECEXP (newpat, 0, 0))))
399238fd1498Szrj     {
399338fd1498Szrj       rtx ni2dest;
399438fd1498Szrj 
399538fd1498Szrj       newi2pat = XVECEXP (newpat, 0, 0);
399638fd1498Szrj       ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
399738fd1498Szrj       newpat = XVECEXP (newpat, 0, 1);
399838fd1498Szrj       SUBST (SET_SRC (newpat),
399938fd1498Szrj 	     gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
400038fd1498Szrj       i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
400138fd1498Szrj 
400238fd1498Szrj       if (i2_code_number >= 0)
400338fd1498Szrj 	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
400438fd1498Szrj 
400538fd1498Szrj       if (insn_code_number >= 0)
400638fd1498Szrj 	swap_i2i3 = 1;
400738fd1498Szrj     }
400838fd1498Szrj 
400938fd1498Szrj   /* Similarly, check for a case where we have a PARALLEL of two independent
401038fd1498Szrj      SETs but we started with three insns.  In this case, we can do the sets
401138fd1498Szrj      as two separate insns.  This case occurs when some SET allows two
401238fd1498Szrj      other insns to combine, but the destination of that SET is still live.
401338fd1498Szrj 
401438fd1498Szrj      Also do this if we started with two insns and (at least) one of the
4015*58e805e6Szrj      resulting sets is a noop; this noop will be deleted later.
4016*58e805e6Szrj 
4017*58e805e6Szrj      Also do this if we started with two insns neither of which was a simple
4018*58e805e6Szrj      move.  */
401938fd1498Szrj 
402038fd1498Szrj   else if (insn_code_number < 0 && asm_noperands (newpat) < 0
402138fd1498Szrj 	   && GET_CODE (newpat) == PARALLEL
402238fd1498Szrj 	   && XVECLEN (newpat, 0) == 2
402338fd1498Szrj 	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
402438fd1498Szrj 	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
402538fd1498Szrj 	   && (i1 || set_noop_p (XVECEXP (newpat, 0, 0))
402638fd1498Szrj 		  || set_noop_p (XVECEXP (newpat, 0, 1)))
402738fd1498Szrj 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
402838fd1498Szrj 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
402938fd1498Szrj 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
403038fd1498Szrj 	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
403138fd1498Szrj 	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
403238fd1498Szrj 				  XVECEXP (newpat, 0, 0))
403338fd1498Szrj 	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
403438fd1498Szrj 				  XVECEXP (newpat, 0, 1))
403538fd1498Szrj 	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
403638fd1498Szrj 		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
403738fd1498Szrj     {
403838fd1498Szrj       rtx set0 = XVECEXP (newpat, 0, 0);
403938fd1498Szrj       rtx set1 = XVECEXP (newpat, 0, 1);
404038fd1498Szrj 
404138fd1498Szrj       /* Normally, it doesn't matter which of the two is done first,
404238fd1498Szrj 	 but the one that references cc0 can't be the second, and
404338fd1498Szrj 	 the one that uses any regs/memory set between i2 and i3 can't
404438fd1498Szrj 	 be first.  The PARALLEL might also have been pre-existing in i3,
404538fd1498Szrj 	 so we need to make sure that we won't wrongly hoist a SET to i2
4046*58e805e6Szrj 	 that would conflict with a death note present in there, or would
4047*58e805e6Szrj 	 have its dest modified between i2 and i3.  */
404838fd1498Szrj       if (!modified_between_p (SET_SRC (set1), i2, i3)
404938fd1498Szrj 	  && !(REG_P (SET_DEST (set1))
405038fd1498Szrj 	       && find_reg_note (i2, REG_DEAD, SET_DEST (set1)))
405138fd1498Szrj 	  && !(GET_CODE (SET_DEST (set1)) == SUBREG
405238fd1498Szrj 	       && find_reg_note (i2, REG_DEAD,
405338fd1498Szrj 				 SUBREG_REG (SET_DEST (set1))))
4054*58e805e6Szrj 	  && !modified_between_p (SET_DEST (set1), i2, i3)
405538fd1498Szrj 	  && (!HAVE_cc0 || !reg_referenced_p (cc0_rtx, set0))
405638fd1498Szrj 	  /* If I3 is a jump, ensure that set0 is a jump so that
405738fd1498Szrj 	     we do not create invalid RTL.  */
405838fd1498Szrj 	  && (!JUMP_P (i3) || SET_DEST (set0) == pc_rtx)
405938fd1498Szrj 	 )
406038fd1498Szrj 	{
406138fd1498Szrj 	  newi2pat = set1;
406238fd1498Szrj 	  newpat = set0;
406338fd1498Szrj 	}
406438fd1498Szrj       else if (!modified_between_p (SET_SRC (set0), i2, i3)
406538fd1498Szrj 	       && !(REG_P (SET_DEST (set0))
406638fd1498Szrj 		    && find_reg_note (i2, REG_DEAD, SET_DEST (set0)))
406738fd1498Szrj 	       && !(GET_CODE (SET_DEST (set0)) == SUBREG
406838fd1498Szrj 		    && find_reg_note (i2, REG_DEAD,
406938fd1498Szrj 				      SUBREG_REG (SET_DEST (set0))))
4070*58e805e6Szrj 	       && !modified_between_p (SET_DEST (set0), i2, i3)
407138fd1498Szrj 	       && (!HAVE_cc0 || !reg_referenced_p (cc0_rtx, set1))
407238fd1498Szrj 	       /* If I3 is a jump, ensure that set1 is a jump so that
407338fd1498Szrj 		  we do not create invalid RTL.  */
407438fd1498Szrj 	       && (!JUMP_P (i3) || SET_DEST (set1) == pc_rtx)
407538fd1498Szrj 	      )
407638fd1498Szrj 	{
407738fd1498Szrj 	  newi2pat = set0;
407838fd1498Szrj 	  newpat = set1;
407938fd1498Szrj 	}
408038fd1498Szrj       else
408138fd1498Szrj 	{
408238fd1498Szrj 	  undo_all ();
408338fd1498Szrj 	  return 0;
408438fd1498Szrj 	}
408538fd1498Szrj 
408638fd1498Szrj       i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
408738fd1498Szrj 
408838fd1498Szrj       if (i2_code_number >= 0)
408938fd1498Szrj 	{
409038fd1498Szrj 	  /* recog_for_combine might have added CLOBBERs to newi2pat.
409138fd1498Szrj 	     Make sure NEWPAT does not depend on the clobbered regs.  */
409238fd1498Szrj 	  if (GET_CODE (newi2pat) == PARALLEL)
409338fd1498Szrj 	    {
409438fd1498Szrj 	      for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
409538fd1498Szrj 		if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
409638fd1498Szrj 		  {
409738fd1498Szrj 		    rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
409838fd1498Szrj 		    if (reg_overlap_mentioned_p (reg, newpat))
409938fd1498Szrj 		      {
410038fd1498Szrj 			undo_all ();
410138fd1498Szrj 			return 0;
410238fd1498Szrj 		      }
410338fd1498Szrj 		  }
410438fd1498Szrj 	    }
410538fd1498Szrj 
410638fd1498Szrj 	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
410738fd1498Szrj 
410838fd1498Szrj 	  if (insn_code_number >= 0)
410938fd1498Szrj 	    split_i2i3 = 1;
411038fd1498Szrj 	}
411138fd1498Szrj     }
411238fd1498Szrj 
411338fd1498Szrj   /* If it still isn't recognized, fail and change things back the way they
411438fd1498Szrj      were.  */
411538fd1498Szrj   if ((insn_code_number < 0
411638fd1498Szrj        /* Is the result a reasonable ASM_OPERANDS?  */
411738fd1498Szrj        && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
411838fd1498Szrj     {
411938fd1498Szrj       undo_all ();
412038fd1498Szrj       return 0;
412138fd1498Szrj     }
412238fd1498Szrj 
412338fd1498Szrj   /* If we had to change another insn, make sure it is valid also.  */
412438fd1498Szrj   if (undobuf.other_insn)
412538fd1498Szrj     {
412638fd1498Szrj       CLEAR_HARD_REG_SET (newpat_used_regs);
412738fd1498Szrj 
412838fd1498Szrj       other_pat = PATTERN (undobuf.other_insn);
412938fd1498Szrj       other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
413038fd1498Szrj 					     &new_other_notes);
413138fd1498Szrj 
413238fd1498Szrj       if (other_code_number < 0 && ! check_asm_operands (other_pat))
413338fd1498Szrj 	{
413438fd1498Szrj 	  undo_all ();
413538fd1498Szrj 	  return 0;
413638fd1498Szrj 	}
413738fd1498Szrj     }
413838fd1498Szrj 
413938fd1498Szrj   /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
414038fd1498Szrj      they are adjacent to each other or not.  */
414138fd1498Szrj   if (HAVE_cc0)
414238fd1498Szrj     {
414338fd1498Szrj       rtx_insn *p = prev_nonnote_insn (i3);
414438fd1498Szrj       if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
414538fd1498Szrj 	  && sets_cc0_p (newi2pat))
414638fd1498Szrj 	{
414738fd1498Szrj 	  undo_all ();
414838fd1498Szrj 	  return 0;
414938fd1498Szrj 	}
415038fd1498Szrj     }
415138fd1498Szrj 
415238fd1498Szrj   /* Only allow this combination if insn_cost reports that the
415338fd1498Szrj      replacement instructions are cheaper than the originals.  */
415438fd1498Szrj   if (!combine_validate_cost (i0, i1, i2, i3, newpat, newi2pat, other_pat))
415538fd1498Szrj     {
415638fd1498Szrj       undo_all ();
415738fd1498Szrj       return 0;
415838fd1498Szrj     }
415938fd1498Szrj 
416038fd1498Szrj   if (MAY_HAVE_DEBUG_BIND_INSNS)
416138fd1498Szrj     {
416238fd1498Szrj       struct undo *undo;
416338fd1498Szrj 
416438fd1498Szrj       for (undo = undobuf.undos; undo; undo = undo->next)
416538fd1498Szrj 	if (undo->kind == UNDO_MODE)
416638fd1498Szrj 	  {
416738fd1498Szrj 	    rtx reg = *undo->where.r;
416838fd1498Szrj 	    machine_mode new_mode = GET_MODE (reg);
416938fd1498Szrj 	    machine_mode old_mode = undo->old_contents.m;
417038fd1498Szrj 
417138fd1498Szrj 	    /* Temporarily revert mode back.  */
417238fd1498Szrj 	    adjust_reg_mode (reg, old_mode);
417338fd1498Szrj 
417438fd1498Szrj 	    if (reg == i2dest && i2scratch)
417538fd1498Szrj 	      {
417638fd1498Szrj 		/* If we used i2dest as a scratch register with a
417738fd1498Szrj 		   different mode, substitute it for the original
417838fd1498Szrj 		   i2src while its original mode is temporarily
417938fd1498Szrj 		   restored, and then clear i2scratch so that we don't
418038fd1498Szrj 		   do it again later.  */
418138fd1498Szrj 		propagate_for_debug (i2, last_combined_insn, reg, i2src,
418238fd1498Szrj 				     this_basic_block);
418338fd1498Szrj 		i2scratch = false;
418438fd1498Szrj 		/* Put back the new mode.  */
418538fd1498Szrj 		adjust_reg_mode (reg, new_mode);
418638fd1498Szrj 	      }
418738fd1498Szrj 	    else
418838fd1498Szrj 	      {
418938fd1498Szrj 		rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
419038fd1498Szrj 		rtx_insn *first, *last;
419138fd1498Szrj 
419238fd1498Szrj 		if (reg == i2dest)
419338fd1498Szrj 		  {
419438fd1498Szrj 		    first = i2;
419538fd1498Szrj 		    last = last_combined_insn;
419638fd1498Szrj 		  }
419738fd1498Szrj 		else
419838fd1498Szrj 		  {
419938fd1498Szrj 		    first = i3;
420038fd1498Szrj 		    last = undobuf.other_insn;
420138fd1498Szrj 		    gcc_assert (last);
420238fd1498Szrj 		    if (DF_INSN_LUID (last)
420338fd1498Szrj 			< DF_INSN_LUID (last_combined_insn))
420438fd1498Szrj 		      last = last_combined_insn;
420538fd1498Szrj 		  }
420638fd1498Szrj 
420738fd1498Szrj 		/* We're dealing with a reg that changed mode but not
420838fd1498Szrj 		   meaning, so we want to turn it into a subreg for
420938fd1498Szrj 		   the new mode.  However, because of REG sharing and
421038fd1498Szrj 		   because its mode had already changed, we have to do
421138fd1498Szrj 		   it in two steps.  First, replace any debug uses of
421238fd1498Szrj 		   reg, with its original mode temporarily restored,
421338fd1498Szrj 		   with this copy we have created; then, replace the
421438fd1498Szrj 		   copy with the SUBREG of the original shared reg,
421538fd1498Szrj 		   once again changed to the new mode.  */
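		/* E.g. (illustrative): for (reg:SI 100) retyped as
		   (reg:DI 100), debug uses are first redirected to a fresh
		   SImode copy of the register, and that copy is then
		   replaced by the lowpart SUBREG of the DImode register.  */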
421638fd1498Szrj 		propagate_for_debug (first, last, reg, tempreg,
421738fd1498Szrj 				     this_basic_block);
421838fd1498Szrj 		adjust_reg_mode (reg, new_mode);
421938fd1498Szrj 		propagate_for_debug (first, last, tempreg,
422038fd1498Szrj 				     lowpart_subreg (old_mode, reg, new_mode),
422138fd1498Szrj 				     this_basic_block);
422238fd1498Szrj 	      }
422338fd1498Szrj 	  }
422438fd1498Szrj     }
422538fd1498Szrj 
422638fd1498Szrj   /* If we will be able to accept this, we have made a
422738fd1498Szrj      change to the destination of I3.  This requires us to
422838fd1498Szrj      do a few adjustments.  */
422938fd1498Szrj 
423038fd1498Szrj   if (changed_i3_dest)
423138fd1498Szrj     {
423238fd1498Szrj       PATTERN (i3) = newpat;
423338fd1498Szrj       adjust_for_new_dest (i3);
423438fd1498Szrj     }
423538fd1498Szrj 
423638fd1498Szrj   /* We now know that we can do this combination.  Merge the insns and
423738fd1498Szrj      update the status of registers and LOG_LINKS.  */
423838fd1498Szrj 
423938fd1498Szrj   if (undobuf.other_insn)
424038fd1498Szrj     {
424138fd1498Szrj       rtx note, next;
424238fd1498Szrj 
424338fd1498Szrj       PATTERN (undobuf.other_insn) = other_pat;
424438fd1498Szrj 
424538fd1498Szrj       /* If any of the notes in OTHER_INSN were REG_DEAD or REG_UNUSED,
424638fd1498Szrj 	 ensure that they are still valid.  Then add any non-duplicate
424738fd1498Szrj 	 notes added by recog_for_combine.  */
424838fd1498Szrj       for (note = REG_NOTES (undobuf.other_insn); note; note = next)
424938fd1498Szrj 	{
425038fd1498Szrj 	  next = XEXP (note, 1);
425138fd1498Szrj 
425238fd1498Szrj 	  if ((REG_NOTE_KIND (note) == REG_DEAD
425338fd1498Szrj 	       && !reg_referenced_p (XEXP (note, 0),
425438fd1498Szrj 				     PATTERN (undobuf.other_insn)))
425538fd1498Szrj 	      || (REG_NOTE_KIND (note) == REG_UNUSED
425638fd1498Szrj 		  && !reg_set_p (XEXP (note, 0),
425738fd1498Szrj 				 PATTERN (undobuf.other_insn)))
425838fd1498Szrj 	      /* Simply drop a REG_EQUAL or REG_EQUIV note, since it may no
425938fd1498Szrj 		 longer be valid for other_insn.  It may be possible to
426038fd1498Szrj 		 record that the CC register has changed and discard only
426138fd1498Szrj 		 those notes, but in practice that is an unnecessary
426238fd1498Szrj 		 complication and gives no meaningful improvement.
426338fd1498Szrj 
426438fd1498Szrj 		 See PR78559.  */
426538fd1498Szrj 	      || REG_NOTE_KIND (note) == REG_EQUAL
426638fd1498Szrj 	      || REG_NOTE_KIND (note) == REG_EQUIV)
426738fd1498Szrj 	    remove_note (undobuf.other_insn, note);
426838fd1498Szrj 	}
426938fd1498Szrj 
427038fd1498Szrj       distribute_notes (new_other_notes, undobuf.other_insn,
427138fd1498Szrj 			undobuf.other_insn, NULL, NULL_RTX, NULL_RTX,
427238fd1498Szrj 			NULL_RTX);
427338fd1498Szrj     }
427438fd1498Szrj 
427538fd1498Szrj   if (swap_i2i3)
427638fd1498Szrj     {
427738fd1498Szrj       /* I3 now uses what used to be its destination and which is now
427838fd1498Szrj 	 I2's destination.  This requires us to do a few adjustments.  */
427938fd1498Szrj       PATTERN (i3) = newpat;
428038fd1498Szrj       adjust_for_new_dest (i3);
428138fd1498Szrj     }
428238fd1498Szrj 
428338fd1498Szrj   if (swap_i2i3 || split_i2i3)
428438fd1498Szrj     {
428538fd1498Szrj       /* We might need a LOG_LINK from I3 to I2.  But then we used to
428638fd1498Szrj 	 have one, so we still will.
428738fd1498Szrj 
428838fd1498Szrj 	 However, some later insn might be using I2's dest and have
428938fd1498Szrj 	 a LOG_LINK pointing at I3.  We should change it to point at
429038fd1498Szrj 	 I2 instead.  */
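      /* E.g. (illustrative): if a later insn in this block uses the
	 register that NEWI2PAT now sets, its LOG_LINK may still point at
	 I3 and is retargeted to I2 by the loop below.  */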
429138fd1498Szrj 
429238fd1498Szrj       /* newi2pat is usually a SET here; however, recog_for_combine might
429338fd1498Szrj 	 have added some clobbers.  */
429438fd1498Szrj       rtx x = newi2pat;
429538fd1498Szrj       if (GET_CODE (x) == PARALLEL)
429638fd1498Szrj 	x = XVECEXP (newi2pat, 0, 0);
429738fd1498Szrj 
429838fd1498Szrj       /* It can only be a SET of a REG or of a SUBREG of a REG.  */
429938fd1498Szrj       unsigned int regno = reg_or_subregno (SET_DEST (x));
430038fd1498Szrj 
430138fd1498Szrj       bool done = false;
430238fd1498Szrj       for (rtx_insn *insn = NEXT_INSN (i3);
430338fd1498Szrj 	   !done
430438fd1498Szrj 	   && insn
430538fd1498Szrj 	   && NONDEBUG_INSN_P (insn)
430638fd1498Szrj 	   && BLOCK_FOR_INSN (insn) == this_basic_block;
430738fd1498Szrj 	   insn = NEXT_INSN (insn))
430838fd1498Szrj 	{
430938fd1498Szrj 	  struct insn_link *link;
431038fd1498Szrj 	  FOR_EACH_LOG_LINK (link, insn)
431138fd1498Szrj 	    if (link->insn == i3 && link->regno == regno)
431238fd1498Szrj 	      {
431338fd1498Szrj 		link->insn = i2;
431438fd1498Szrj 		done = true;
431538fd1498Szrj 		break;
431638fd1498Szrj 	      }
431738fd1498Szrj 	}
431838fd1498Szrj     }
431938fd1498Szrj 
432038fd1498Szrj   {
432138fd1498Szrj     rtx i3notes, i2notes, i1notes = 0, i0notes = 0;
432238fd1498Szrj     struct insn_link *i3links, *i2links, *i1links = 0, *i0links = 0;
432338fd1498Szrj     rtx midnotes = 0;
432438fd1498Szrj     int from_luid;
432538fd1498Szrj     /* Compute which registers we expect to eliminate.  newi2pat may be setting
432638fd1498Szrj        either i3dest or i2dest, so we must check it.  */
432738fd1498Szrj     rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
432838fd1498Szrj 		   || i2dest_in_i2src || i2dest_in_i1src || i2dest_in_i0src
432938fd1498Szrj 		   || !i2dest_killed
433038fd1498Szrj 		   ? 0 : i2dest);
433138fd1498Szrj     /* For i1, we need to compute both local elimination and global
433238fd1498Szrj        elimination information with respect to newi2pat because i1dest
433338fd1498Szrj        may be the same as i3dest, in which case newi2pat may be setting
433438fd1498Szrj        i1dest.  Global information is used when distributing REG_DEAD
433538fd1498Szrj        note for i2 and i3, in which case it does matter if newi2pat sets
433638fd1498Szrj        notes for i2 and i3, in which case it does matter if newi2pat sets
433738fd1498Szrj        i1dest or not.
433838fd1498Szrj 
433938fd1498Szrj        Local information is used when distributing the REG_DEAD note for i1,
434038fd1498Szrj        See PR62151, if we have four insns combination:
434138fd1498Szrj 	   i0: r0 <- i0src
434238fd1498Szrj 	   i1: r1 <- i1src (using r0)
434338fd1498Szrj 		     REG_DEAD (r0)
434438fd1498Szrj 	   i2: r0 <- i2src (using r1)
434538fd1498Szrj 	   i3: r3 <- i3src (using r0)
434638fd1498Szrj 	   ix: using r0
434738fd1498Szrj        From i1's point of view, r0 is eliminated, no matter if it is set
434838fd1498Szrj        by newi2pat or not.  In other words, REG_DEAD info for r0 in i1
434938fd1498Szrj        should be discarded.
435038fd1498Szrj 
435138fd1498Szrj        Note local information only affects cases in forms like "I1->I2->I3",
435238fd1498Szrj        "I0->I1->I2->I3" or "I0&I1->I2, I2->I3".  For other cases like
435338fd1498Szrj        "I0->I1, I1&I2->I3" or "I1&I2->I3", newi2pat won't set i1dest or
435438fd1498Szrj        i0dest anyway.  */
435538fd1498Szrj     rtx local_elim_i1 = (i1 == 0 || i1dest_in_i1src || i1dest_in_i0src
435638fd1498Szrj 			 || !i1dest_killed
435738fd1498Szrj 			 ? 0 : i1dest);
435838fd1498Szrj     rtx elim_i1 = (local_elim_i1 == 0
435938fd1498Szrj 		   || (newi2pat && reg_set_p (i1dest, newi2pat))
436038fd1498Szrj 		   ? 0 : i1dest);
436138fd1498Szrj     /* Same case as i1.  */
436238fd1498Szrj     rtx local_elim_i0 = (i0 == 0 || i0dest_in_i0src || !i0dest_killed
436338fd1498Szrj 			 ? 0 : i0dest);
436438fd1498Szrj     rtx elim_i0 = (local_elim_i0 == 0
436538fd1498Szrj 		   || (newi2pat && reg_set_p (i0dest, newi2pat))
436638fd1498Szrj 		   ? 0 : i0dest);
436738fd1498Szrj 
436838fd1498Szrj     /* Get the old REG_NOTES and LOG_LINKS from all our insns and
436938fd1498Szrj        clear them.  */
437038fd1498Szrj     i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
437138fd1498Szrj     i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
437238fd1498Szrj     if (i1)
437338fd1498Szrj       i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
437438fd1498Szrj     if (i0)
437538fd1498Szrj       i0notes = REG_NOTES (i0), i0links = LOG_LINKS (i0);
437638fd1498Szrj 
437738fd1498Szrj     /* Ensure that we do not have something that should not be shared but
437838fd1498Szrj        occurs multiple times in the new insns.  Check this by first
437938fd1498Szrj        resetting all the `used' flags and then copying anything shared.  */
438038fd1498Szrj 
438138fd1498Szrj     reset_used_flags (i3notes);
438238fd1498Szrj     reset_used_flags (i2notes);
438338fd1498Szrj     reset_used_flags (i1notes);
438438fd1498Szrj     reset_used_flags (i0notes);
438538fd1498Szrj     reset_used_flags (newpat);
438638fd1498Szrj     reset_used_flags (newi2pat);
438738fd1498Szrj     if (undobuf.other_insn)
438838fd1498Szrj       reset_used_flags (PATTERN (undobuf.other_insn));
438938fd1498Szrj 
439038fd1498Szrj     i3notes = copy_rtx_if_shared (i3notes);
439138fd1498Szrj     i2notes = copy_rtx_if_shared (i2notes);
439238fd1498Szrj     i1notes = copy_rtx_if_shared (i1notes);
439338fd1498Szrj     i0notes = copy_rtx_if_shared (i0notes);
439438fd1498Szrj     newpat = copy_rtx_if_shared (newpat);
439538fd1498Szrj     newi2pat = copy_rtx_if_shared (newi2pat);
439638fd1498Szrj     if (undobuf.other_insn)
439738fd1498Szrj       reset_used_flags (PATTERN (undobuf.other_insn));
439838fd1498Szrj 
439938fd1498Szrj     INSN_CODE (i3) = insn_code_number;
440038fd1498Szrj     PATTERN (i3) = newpat;
440138fd1498Szrj 
440238fd1498Szrj     if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
440338fd1498Szrj       {
440438fd1498Szrj 	for (rtx link = CALL_INSN_FUNCTION_USAGE (i3); link;
440538fd1498Szrj 	     link = XEXP (link, 1))
440638fd1498Szrj 	  {
440738fd1498Szrj 	    if (substed_i2)
440838fd1498Szrj 	      {
440938fd1498Szrj 		/* I2SRC must still be meaningful at this point.  Some
441038fd1498Szrj 		   splitting operations can invalidate I2SRC, but those
441138fd1498Szrj 		   operations do not apply to calls.  */
441238fd1498Szrj 		gcc_assert (i2src);
441338fd1498Szrj 		XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
441438fd1498Szrj 						       i2dest, i2src);
441538fd1498Szrj 	      }
441638fd1498Szrj 	    if (substed_i1)
441738fd1498Szrj 	      XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
441838fd1498Szrj 						     i1dest, i1src);
441938fd1498Szrj 	    if (substed_i0)
442038fd1498Szrj 	      XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
442138fd1498Szrj 						     i0dest, i0src);
442238fd1498Szrj 	  }
442338fd1498Szrj       }
442438fd1498Szrj 
442538fd1498Szrj     if (undobuf.other_insn)
442638fd1498Szrj       INSN_CODE (undobuf.other_insn) = other_code_number;
442738fd1498Szrj 
442838fd1498Szrj     /* We had one special case above where I2 had more than one set and
442938fd1498Szrj        we replaced a destination of one of those sets with the destination
443038fd1498Szrj        of I3.  In that case, we have to update LOG_LINKS of insns later
443138fd1498Szrj        in this basic block.  Note that this (expensive) case is rare.
443238fd1498Szrj 
443338fd1498Szrj        Also, in this case, we must pretend that all REG_NOTEs for I2
443438fd1498Szrj        actually came from I3, so that REG_UNUSED notes from I2 will be
443538fd1498Szrj        properly handled.  */
443638fd1498Szrj 
443738fd1498Szrj     if (i3_subst_into_i2)
443838fd1498Szrj       {
443938fd1498Szrj 	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
444038fd1498Szrj 	  if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
444138fd1498Szrj 	       || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
444238fd1498Szrj 	      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
444338fd1498Szrj 	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
444438fd1498Szrj 	      && ! find_reg_note (i2, REG_UNUSED,
444538fd1498Szrj 				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
444638fd1498Szrj 	    for (temp_insn = NEXT_INSN (i2);
444738fd1498Szrj 		 temp_insn
444838fd1498Szrj 		 && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
444938fd1498Szrj 		     || BB_HEAD (this_basic_block) != temp_insn);
445038fd1498Szrj 		 temp_insn = NEXT_INSN (temp_insn))
445138fd1498Szrj 	      if (temp_insn != i3 && NONDEBUG_INSN_P (temp_insn))
445238fd1498Szrj 		FOR_EACH_LOG_LINK (link, temp_insn)
445338fd1498Szrj 		  if (link->insn == i2)
445438fd1498Szrj 		    link->insn = i3;
445538fd1498Szrj 
445638fd1498Szrj 	if (i3notes)
445738fd1498Szrj 	  {
445838fd1498Szrj 	    rtx link = i3notes;
445938fd1498Szrj 	    while (XEXP (link, 1))
446038fd1498Szrj 	      link = XEXP (link, 1);
446138fd1498Szrj 	    XEXP (link, 1) = i2notes;
446238fd1498Szrj 	  }
446338fd1498Szrj 	else
446438fd1498Szrj 	  i3notes = i2notes;
446538fd1498Szrj 	i2notes = 0;
446638fd1498Szrj       }
446738fd1498Szrj 
446838fd1498Szrj     LOG_LINKS (i3) = NULL;
446938fd1498Szrj     REG_NOTES (i3) = 0;
447038fd1498Szrj     LOG_LINKS (i2) = NULL;
447138fd1498Szrj     REG_NOTES (i2) = 0;
447238fd1498Szrj 
447338fd1498Szrj     if (newi2pat)
447438fd1498Szrj       {
447538fd1498Szrj 	if (MAY_HAVE_DEBUG_BIND_INSNS && i2scratch)
447638fd1498Szrj 	  propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
447738fd1498Szrj 			       this_basic_block);
447838fd1498Szrj 	INSN_CODE (i2) = i2_code_number;
447938fd1498Szrj 	PATTERN (i2) = newi2pat;
448038fd1498Szrj       }
448138fd1498Szrj     else
448238fd1498Szrj       {
448338fd1498Szrj 	if (MAY_HAVE_DEBUG_BIND_INSNS && i2src)
448438fd1498Szrj 	  propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
448538fd1498Szrj 			       this_basic_block);
448638fd1498Szrj 	SET_INSN_DELETED (i2);
448738fd1498Szrj       }
448838fd1498Szrj 
448938fd1498Szrj     if (i1)
449038fd1498Szrj       {
449138fd1498Szrj 	LOG_LINKS (i1) = NULL;
449238fd1498Szrj 	REG_NOTES (i1) = 0;
449338fd1498Szrj 	if (MAY_HAVE_DEBUG_BIND_INSNS)
449438fd1498Szrj 	  propagate_for_debug (i1, last_combined_insn, i1dest, i1src,
449538fd1498Szrj 			       this_basic_block);
449638fd1498Szrj 	SET_INSN_DELETED (i1);
449738fd1498Szrj       }
449838fd1498Szrj 
449938fd1498Szrj     if (i0)
450038fd1498Szrj       {
450138fd1498Szrj 	LOG_LINKS (i0) = NULL;
450238fd1498Szrj 	REG_NOTES (i0) = 0;
450338fd1498Szrj 	if (MAY_HAVE_DEBUG_BIND_INSNS)
450438fd1498Szrj 	  propagate_for_debug (i0, last_combined_insn, i0dest, i0src,
450538fd1498Szrj 			       this_basic_block);
450638fd1498Szrj 	SET_INSN_DELETED (i0);
450738fd1498Szrj       }
450838fd1498Szrj 
450938fd1498Szrj     /* Get death notes for everything that is now used in either I3 or
451038fd1498Szrj        I2 and used to die in a previous insn.  If we built two new
451138fd1498Szrj        patterns, move from I1 to I2 then I2 to I3 so that we get the
451238fd1498Szrj        proper movement on registers that I2 modifies.  */
451338fd1498Szrj 
451438fd1498Szrj     if (i0)
451538fd1498Szrj       from_luid = DF_INSN_LUID (i0);
451638fd1498Szrj     else if (i1)
451738fd1498Szrj       from_luid = DF_INSN_LUID (i1);
451838fd1498Szrj     else
451938fd1498Szrj       from_luid = DF_INSN_LUID (i2);
452038fd1498Szrj     if (newi2pat)
452138fd1498Szrj       move_deaths (newi2pat, NULL_RTX, from_luid, i2, &midnotes);
452238fd1498Szrj     move_deaths (newpat, newi2pat, from_luid, i3, &midnotes);
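    /* E.g. a register that last died in I1 but is still used by NEWI2PAT
       gets its REG_DEAD note moved to the new I2 by the first call; the
       second call is passed NEWI2PAT so that deaths of registers the new
       I2 sets are not carried past it into I3.  (Illustrative reading of
       the call order.)  */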
452338fd1498Szrj 
452438fd1498Szrj     /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
452538fd1498Szrj     if (i3notes)
452638fd1498Szrj       distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL,
452738fd1498Szrj 			elim_i2, elim_i1, elim_i0);
452838fd1498Szrj     if (i2notes)
452938fd1498Szrj       distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL,
453038fd1498Szrj 			elim_i2, elim_i1, elim_i0);
453138fd1498Szrj     if (i1notes)
453238fd1498Szrj       distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL,
453338fd1498Szrj 			elim_i2, local_elim_i1, local_elim_i0);
453438fd1498Szrj     if (i0notes)
453538fd1498Szrj       distribute_notes (i0notes, i0, i3, newi2pat ? i2 : NULL,
453638fd1498Szrj 			elim_i2, elim_i1, local_elim_i0);
453738fd1498Szrj     if (midnotes)
453838fd1498Szrj       distribute_notes (midnotes, NULL, i3, newi2pat ? i2 : NULL,
453938fd1498Szrj 			elim_i2, elim_i1, elim_i0);
454038fd1498Szrj 
454138fd1498Szrj     /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
454238fd1498Szrj        know these are REG_UNUSED and want them to go to the desired insn,
454338fd1498Szrj        so we always pass it as i3.  */
454438fd1498Szrj 
454538fd1498Szrj     if (newi2pat && new_i2_notes)
454638fd1498Szrj       distribute_notes (new_i2_notes, i2, i2, NULL, NULL_RTX, NULL_RTX,
454738fd1498Szrj 			NULL_RTX);
454838fd1498Szrj 
454938fd1498Szrj     if (new_i3_notes)
455038fd1498Szrj       distribute_notes (new_i3_notes, i3, i3, NULL, NULL_RTX, NULL_RTX,
455138fd1498Szrj 			NULL_RTX);
455238fd1498Szrj 
455338fd1498Szrj     /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
455438fd1498Szrj        put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
455538fd1498Szrj        I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
455638fd1498Szrj        in that case, it might delete I2.  Similarly for I2 and I1.
455738fd1498Szrj        Show an additional death due to the REG_DEAD note we make here.  If
455838fd1498Szrj        we discard it in distribute_notes, we will decrement it again.  */
455938fd1498Szrj 
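    /* Concretely (illustrative): if NEWI2PAT is (set (reg 100) ...) and
       I3DEST_KILLED is (reg 100), the old value's death belongs before the
       new I2; directing the note at I3 could let distribute_notes conclude
       that I2's set is dead and delete it.  */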
456038fd1498Szrj     if (i3dest_killed)
456138fd1498Szrj       {
456238fd1498Szrj 	rtx new_note = alloc_reg_note (REG_DEAD, i3dest_killed, NULL_RTX);
456338fd1498Szrj 	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
456438fd1498Szrj 	  distribute_notes (new_note, NULL, i2, NULL, elim_i2,
456538fd1498Szrj 			    elim_i1, elim_i0);
456638fd1498Szrj 	else
456738fd1498Szrj 	  distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
456838fd1498Szrj 			    elim_i2, elim_i1, elim_i0);
456938fd1498Szrj       }
457038fd1498Szrj 
457138fd1498Szrj     if (i2dest_in_i2src)
457238fd1498Szrj       {
457338fd1498Szrj 	rtx new_note = alloc_reg_note (REG_DEAD, i2dest, NULL_RTX);
457438fd1498Szrj 	if (newi2pat && reg_set_p (i2dest, newi2pat))
457538fd1498Szrj 	  distribute_notes (new_note,  NULL, i2, NULL, NULL_RTX,
457638fd1498Szrj 			    NULL_RTX, NULL_RTX);
457738fd1498Szrj 	else
457838fd1498Szrj 	  distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
457938fd1498Szrj 			    NULL_RTX, NULL_RTX, NULL_RTX);
458038fd1498Szrj       }
458138fd1498Szrj 
458238fd1498Szrj     if (i1dest_in_i1src)
458338fd1498Szrj       {
458438fd1498Szrj 	rtx new_note = alloc_reg_note (REG_DEAD, i1dest, NULL_RTX);
458538fd1498Szrj 	if (newi2pat && reg_set_p (i1dest, newi2pat))
458638fd1498Szrj 	  distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
458738fd1498Szrj 			    NULL_RTX, NULL_RTX);
458838fd1498Szrj 	else
458938fd1498Szrj 	  distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
459038fd1498Szrj 			    NULL_RTX, NULL_RTX, NULL_RTX);
459138fd1498Szrj       }
459238fd1498Szrj 
459338fd1498Szrj     if (i0dest_in_i0src)
459438fd1498Szrj       {
459538fd1498Szrj 	rtx new_note = alloc_reg_note (REG_DEAD, i0dest, NULL_RTX);
459638fd1498Szrj 	if (newi2pat && reg_set_p (i0dest, newi2pat))
459738fd1498Szrj 	  distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
459838fd1498Szrj 			    NULL_RTX, NULL_RTX);
459938fd1498Szrj 	else
460038fd1498Szrj 	  distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
460138fd1498Szrj 			    NULL_RTX, NULL_RTX, NULL_RTX);
460238fd1498Szrj       }
460338fd1498Szrj 
460438fd1498Szrj     distribute_links (i3links);
460538fd1498Szrj     distribute_links (i2links);
460638fd1498Szrj     distribute_links (i1links);
460738fd1498Szrj     distribute_links (i0links);
460838fd1498Szrj 
460938fd1498Szrj     if (REG_P (i2dest))
461038fd1498Szrj       {
461138fd1498Szrj 	struct insn_link *link;
461238fd1498Szrj 	rtx_insn *i2_insn = 0;
461338fd1498Szrj 	rtx i2_val = 0, set;
461438fd1498Szrj 
461538fd1498Szrj 	/* The insn that used to set this register doesn't exist, and
461638fd1498Szrj 	   this life of the register may not exist either.  See if one of
461738fd1498Szrj 	   I3's links points to an insn that sets I2DEST.  If it does,
461838fd1498Szrj 	   that is now the last known value for I2DEST.  If we don't update
461938fd1498Szrj 	   this and I2 set the register to a value that depended on its old
462038fd1498Szrj 	   contents, we will get confused.  If this insn is used, things
462138fd1498Szrj 	   will be set correctly in combine_instructions.  */
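	/* E.g. if the vanished I2 was
	   (set (reg 100) (plus (reg 100) (const_int 4)))
	   and an insn reached through I3's links still sets (reg 100),
	   that insn's SET_SRC is the value to record here.  (Register
	   number and constant are illustrative.)  */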
462238fd1498Szrj 	FOR_EACH_LOG_LINK (link, i3)
462338fd1498Szrj 	  if ((set = single_set (link->insn)) != 0
462438fd1498Szrj 	      && rtx_equal_p (i2dest, SET_DEST (set)))
462538fd1498Szrj 	    i2_insn = link->insn, i2_val = SET_SRC (set);
462638fd1498Szrj 
462738fd1498Szrj 	record_value_for_reg (i2dest, i2_insn, i2_val);
462838fd1498Szrj 
462938fd1498Szrj 	/* If the reg formerly set in I2 died only once and that was in I3,
463038fd1498Szrj 	   zero its use count so it won't make `reload' do any work.  */
463138fd1498Szrj 	if (! added_sets_2
463238fd1498Szrj 	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
463338fd1498Szrj 	    && ! i2dest_in_i2src
463438fd1498Szrj 	    && REGNO (i2dest) < reg_n_sets_max)
463538fd1498Szrj 	  INC_REG_N_SETS (REGNO (i2dest), -1);
463638fd1498Szrj       }
463738fd1498Szrj 
463838fd1498Szrj     if (i1 && REG_P (i1dest))
463938fd1498Szrj       {
464038fd1498Szrj 	struct insn_link *link;
464138fd1498Szrj 	rtx_insn *i1_insn = 0;
464238fd1498Szrj 	rtx i1_val = 0, set;
464338fd1498Szrj 
464438fd1498Szrj 	FOR_EACH_LOG_LINK (link, i3)
464538fd1498Szrj 	  if ((set = single_set (link->insn)) != 0
464638fd1498Szrj 	      && rtx_equal_p (i1dest, SET_DEST (set)))
464738fd1498Szrj 	    i1_insn = link->insn, i1_val = SET_SRC (set);
464838fd1498Szrj 
464938fd1498Szrj 	record_value_for_reg (i1dest, i1_insn, i1_val);
465038fd1498Szrj 
465138fd1498Szrj 	if (! added_sets_1
465238fd1498Szrj 	    && ! i1dest_in_i1src
465338fd1498Szrj 	    && REGNO (i1dest) < reg_n_sets_max)
465438fd1498Szrj 	  INC_REG_N_SETS (REGNO (i1dest), -1);
465538fd1498Szrj       }
465638fd1498Szrj 
465738fd1498Szrj     if (i0 && REG_P (i0dest))
465838fd1498Szrj       {
465938fd1498Szrj 	struct insn_link *link;
466038fd1498Szrj 	rtx_insn *i0_insn = 0;
466138fd1498Szrj 	rtx i0_val = 0, set;
466238fd1498Szrj 
466338fd1498Szrj 	FOR_EACH_LOG_LINK (link, i3)
466438fd1498Szrj 	  if ((set = single_set (link->insn)) != 0
466538fd1498Szrj 	      && rtx_equal_p (i0dest, SET_DEST (set)))
466638fd1498Szrj 	    i0_insn = link->insn, i0_val = SET_SRC (set);
466738fd1498Szrj 
466838fd1498Szrj 	record_value_for_reg (i0dest, i0_insn, i0_val);
466938fd1498Szrj 
467038fd1498Szrj 	if (! added_sets_0
467138fd1498Szrj 	    && ! i0dest_in_i0src
467238fd1498Szrj 	    && REGNO (i0dest) < reg_n_sets_max)
467338fd1498Szrj 	  INC_REG_N_SETS (REGNO (i0dest), -1);
467438fd1498Szrj       }
467538fd1498Szrj 
467638fd1498Szrj     /* Update reg_stat[].nonzero_bits et al for any changes that may have
467738fd1498Szrj        been made to this insn.  The order is important, because newi2pat
467838fd1498Szrj        can affect nonzero_bits of newpat.  */
467938fd1498Szrj     if (newi2pat)
468038fd1498Szrj       note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
468138fd1498Szrj     note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
468238fd1498Szrj   }
468338fd1498Szrj 
468438fd1498Szrj   if (undobuf.other_insn != NULL_RTX)
468538fd1498Szrj     {
468638fd1498Szrj       if (dump_file)
468738fd1498Szrj 	{
468838fd1498Szrj 	  fprintf (dump_file, "modifying other_insn ");
468938fd1498Szrj 	  dump_insn_slim (dump_file, undobuf.other_insn);
469038fd1498Szrj 	}
469138fd1498Szrj       df_insn_rescan (undobuf.other_insn);
469238fd1498Szrj     }
469338fd1498Szrj 
469438fd1498Szrj   if (i0 && !(NOTE_P (i0) && (NOTE_KIND (i0) == NOTE_INSN_DELETED)))
469538fd1498Szrj     {
469638fd1498Szrj       if (dump_file)
469738fd1498Szrj 	{
469838fd1498Szrj 	  fprintf (dump_file, "modifying insn i0 ");
469938fd1498Szrj 	  dump_insn_slim (dump_file, i0);
470038fd1498Szrj 	}
470138fd1498Szrj       df_insn_rescan (i0);
470238fd1498Szrj     }
470338fd1498Szrj 
470438fd1498Szrj   if (i1 && !(NOTE_P (i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
470538fd1498Szrj     {
470638fd1498Szrj       if (dump_file)
470738fd1498Szrj 	{
470838fd1498Szrj 	  fprintf (dump_file, "modifying insn i1 ");
470938fd1498Szrj 	  dump_insn_slim (dump_file, i1);
471038fd1498Szrj 	}
471138fd1498Szrj       df_insn_rescan (i1);
471238fd1498Szrj     }
471338fd1498Szrj 
471438fd1498Szrj   if (i2 && !(NOTE_P (i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
471538fd1498Szrj     {
471638fd1498Szrj       if (dump_file)
471738fd1498Szrj 	{
471838fd1498Szrj 	  fprintf (dump_file, "modifying insn i2 ");
471938fd1498Szrj 	  dump_insn_slim (dump_file, i2);
472038fd1498Szrj 	}
472138fd1498Szrj       df_insn_rescan (i2);
472238fd1498Szrj     }
472338fd1498Szrj 
472438fd1498Szrj   if (i3 && !(NOTE_P (i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
472538fd1498Szrj     {
472638fd1498Szrj       if (dump_file)
472738fd1498Szrj 	{
472838fd1498Szrj 	  fprintf (dump_file, "modifying insn i3 ");
472938fd1498Szrj 	  dump_insn_slim (dump_file, i3);
473038fd1498Szrj 	}
473138fd1498Szrj       df_insn_rescan (i3);
473238fd1498Szrj     }
473338fd1498Szrj 
473438fd1498Szrj   /* Set new_direct_jump_p if a new return or simple jump instruction
473538fd1498Szrj      has been created.  Adjust the CFG accordingly.  */
473638fd1498Szrj   if (returnjump_p (i3) || any_uncondjump_p (i3))
473738fd1498Szrj     {
473838fd1498Szrj       *new_direct_jump_p = 1;
473938fd1498Szrj       mark_jump_label (PATTERN (i3), i3, 0);
474038fd1498Szrj       update_cfg_for_uncondjump (i3);
474138fd1498Szrj     }
474238fd1498Szrj 
474338fd1498Szrj   if (undobuf.other_insn != NULL_RTX
474438fd1498Szrj       && (returnjump_p (undobuf.other_insn)
474538fd1498Szrj 	  || any_uncondjump_p (undobuf.other_insn)))
474638fd1498Szrj     {
474738fd1498Szrj       *new_direct_jump_p = 1;
474838fd1498Szrj       update_cfg_for_uncondjump (undobuf.other_insn);
474938fd1498Szrj     }
475038fd1498Szrj 
475138fd1498Szrj   if (GET_CODE (PATTERN (i3)) == TRAP_IF
475238fd1498Szrj       && XEXP (PATTERN (i3), 0) == const1_rtx)
475338fd1498Szrj     {
475438fd1498Szrj       basic_block bb = BLOCK_FOR_INSN (i3);
475538fd1498Szrj       gcc_assert (bb);
475638fd1498Szrj       remove_edge (split_block (bb, i3));
475738fd1498Szrj       emit_barrier_after_bb (bb);
475838fd1498Szrj       *new_direct_jump_p = 1;
475938fd1498Szrj     }
476038fd1498Szrj 
476138fd1498Szrj   if (undobuf.other_insn
476238fd1498Szrj       && GET_CODE (PATTERN (undobuf.other_insn)) == TRAP_IF
476338fd1498Szrj       && XEXP (PATTERN (undobuf.other_insn), 0) == const1_rtx)
476438fd1498Szrj     {
476538fd1498Szrj       basic_block bb = BLOCK_FOR_INSN (undobuf.other_insn);
476638fd1498Szrj       gcc_assert (bb);
476738fd1498Szrj       remove_edge (split_block (bb, undobuf.other_insn));
476838fd1498Szrj       emit_barrier_after_bb (bb);
476938fd1498Szrj       *new_direct_jump_p = 1;
477038fd1498Szrj     }
477138fd1498Szrj 
477238fd1498Szrj   /* A no-op move might also require cleaning up the CFG, if it comes
477338fd1498Szrj      from the simplification of a jump.  */
477438fd1498Szrj   if (JUMP_P (i3)
477538fd1498Szrj       && GET_CODE (newpat) == SET
477638fd1498Szrj       && SET_SRC (newpat) == pc_rtx
477738fd1498Szrj       && SET_DEST (newpat) == pc_rtx)
477838fd1498Szrj     {
477938fd1498Szrj       *new_direct_jump_p = 1;
478038fd1498Szrj       update_cfg_for_uncondjump (i3);
478138fd1498Szrj     }
478238fd1498Szrj 
478338fd1498Szrj   if (undobuf.other_insn != NULL_RTX
478438fd1498Szrj       && JUMP_P (undobuf.other_insn)
478538fd1498Szrj       && GET_CODE (PATTERN (undobuf.other_insn)) == SET
478638fd1498Szrj       && SET_SRC (PATTERN (undobuf.other_insn)) == pc_rtx
478738fd1498Szrj       && SET_DEST (PATTERN (undobuf.other_insn)) == pc_rtx)
478838fd1498Szrj     {
478938fd1498Szrj       *new_direct_jump_p = 1;
479038fd1498Szrj       update_cfg_for_uncondjump (undobuf.other_insn);
479138fd1498Szrj     }
479238fd1498Szrj 
479338fd1498Szrj   combine_successes++;
479438fd1498Szrj   undo_commit ();
479538fd1498Szrj 
479638fd1498Szrj   rtx_insn *ret = newi2pat ? i2 : i3;
479738fd1498Szrj   if (added_links_insn && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (ret))
479838fd1498Szrj     ret = added_links_insn;
479938fd1498Szrj   if (added_notes_insn && DF_INSN_LUID (added_notes_insn) < DF_INSN_LUID (ret))
480038fd1498Szrj     ret = added_notes_insn;
480138fd1498Szrj 
480238fd1498Szrj   return ret;
480338fd1498Szrj }
480438fd1498Szrj 
480538fd1498Szrj /* Get a marker for undoing to the current state.  */
480638fd1498Szrj 
480738fd1498Szrj static void *
480838fd1498Szrj get_undo_marker (void)
480938fd1498Szrj {
481038fd1498Szrj   return undobuf.undos;
481138fd1498Szrj }
481238fd1498Szrj 
481338fd1498Szrj /* Undo the modifications up to the marker.  */
481438fd1498Szrj 
481538fd1498Szrj static void
481638fd1498Szrj undo_to_marker (void *marker)
481738fd1498Szrj {
481838fd1498Szrj   struct undo *undo, *next;
481938fd1498Szrj 
482038fd1498Szrj   for (undo = undobuf.undos; undo != marker; undo = next)
482138fd1498Szrj     {
482238fd1498Szrj       gcc_assert (undo);
482338fd1498Szrj 
482438fd1498Szrj       next = undo->next;
482538fd1498Szrj       switch (undo->kind)
482638fd1498Szrj 	{
482738fd1498Szrj 	case UNDO_RTX:
482838fd1498Szrj 	  *undo->where.r = undo->old_contents.r;
482938fd1498Szrj 	  break;
483038fd1498Szrj 	case UNDO_INT:
483138fd1498Szrj 	  *undo->where.i = undo->old_contents.i;
483238fd1498Szrj 	  break;
483338fd1498Szrj 	case UNDO_MODE:
483438fd1498Szrj 	  adjust_reg_mode (*undo->where.r, undo->old_contents.m);
483538fd1498Szrj 	  break;
483638fd1498Szrj 	case UNDO_LINKS:
483738fd1498Szrj 	  *undo->where.l = undo->old_contents.l;
483838fd1498Szrj 	  break;
483938fd1498Szrj 	default:
484038fd1498Szrj 	  gcc_unreachable ();
484138fd1498Szrj 	}
484238fd1498Szrj 
484338fd1498Szrj       undo->next = undobuf.frees;
484438fd1498Szrj       undobuf.frees = undo;
484538fd1498Szrj     }
484638fd1498Szrj 
484738fd1498Szrj   undobuf.undos = (struct undo *) marker;
484838fd1498Szrj }
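/* Typical use (illustrative): make tentative substitutions and roll them
   back if recognition fails, e.g.

     void *marker = get_undo_marker ();
     ... speculative SUBST calls ...
     if (failed)
       undo_to_marker (marker);

   Changes recorded before the marker was taken stay in effect.  */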
484938fd1498Szrj 
485038fd1498Szrj /* Undo all the modifications recorded in undobuf.  */
485138fd1498Szrj 
485238fd1498Szrj static void
485338fd1498Szrj undo_all (void)
485438fd1498Szrj {
485538fd1498Szrj   undo_to_marker (0);
485638fd1498Szrj }
485738fd1498Szrj 
485838fd1498Szrj /* We've committed to accepting the changes we made.  Move all
485938fd1498Szrj    of the undos to the free list.  */
486038fd1498Szrj 
486138fd1498Szrj static void
486238fd1498Szrj undo_commit (void)
486338fd1498Szrj {
486438fd1498Szrj   struct undo *undo, *next;
486538fd1498Szrj 
486638fd1498Szrj   for (undo = undobuf.undos; undo; undo = next)
486738fd1498Szrj     {
486838fd1498Szrj       next = undo->next;
486938fd1498Szrj       undo->next = undobuf.frees;
487038fd1498Szrj       undobuf.frees = undo;
487138fd1498Szrj     }
487238fd1498Szrj   undobuf.undos = 0;
487338fd1498Szrj }
487438fd1498Szrj 
487538fd1498Szrj /* Find the innermost point within the rtx at LOC, possibly LOC itself,
487638fd1498Szrj    where we have an arithmetic expression and return that point.  LOC will
487738fd1498Szrj    be inside INSN.
487838fd1498Szrj 
487938fd1498Szrj    try_combine will call this function to see if an insn can be split into
488038fd1498Szrj    two insns.  */
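/* For instance (illustrative target constraint): given
   (set (reg) (and (reg) (const_int 0x12345678))) on a machine whose AND
   accepts only small immediates, the address of the constant operand is
   returned, so the constant can be computed by a separate insn.  */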
488138fd1498Szrj 
488238fd1498Szrj static rtx *
488338fd1498Szrj find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
488438fd1498Szrj {
488538fd1498Szrj   rtx x = *loc;
488638fd1498Szrj   enum rtx_code code = GET_CODE (x);
488738fd1498Szrj   rtx *split;
488838fd1498Szrj   unsigned HOST_WIDE_INT len = 0;
488938fd1498Szrj   HOST_WIDE_INT pos = 0;
489038fd1498Szrj   int unsignedp = 0;
489138fd1498Szrj   rtx inner = NULL_RTX;
489238fd1498Szrj   scalar_int_mode mode, inner_mode;
489338fd1498Szrj 
489438fd1498Szrj   /* First special-case some codes.  */
489538fd1498Szrj   switch (code)
489638fd1498Szrj     {
489738fd1498Szrj     case SUBREG:
489838fd1498Szrj #ifdef INSN_SCHEDULING
489938fd1498Szrj       /* If we are making a paradoxical SUBREG invalid, it becomes a split
490038fd1498Szrj 	 point.  */
490138fd1498Szrj       if (MEM_P (SUBREG_REG (x)))
490238fd1498Szrj 	return loc;
490338fd1498Szrj #endif
490438fd1498Szrj       return find_split_point (&SUBREG_REG (x), insn, false);
490538fd1498Szrj 
490638fd1498Szrj     case MEM:
490738fd1498Szrj       /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
490838fd1498Szrj 	 using LO_SUM and HIGH.  */
490938fd1498Szrj       if (HAVE_lo_sum && (GET_CODE (XEXP (x, 0)) == CONST
491038fd1498Szrj 			  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF))
491138fd1498Szrj 	{
491238fd1498Szrj 	  machine_mode address_mode = get_address_mode (x);
491338fd1498Szrj 
491438fd1498Szrj 	  SUBST (XEXP (x, 0),
491538fd1498Szrj 		 gen_rtx_LO_SUM (address_mode,
491638fd1498Szrj 				 gen_rtx_HIGH (address_mode, XEXP (x, 0)),
491738fd1498Szrj 				 XEXP (x, 0)));
491838fd1498Szrj 	  return &XEXP (XEXP (x, 0), 0);
491938fd1498Szrj 	}
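      /* E.g. (mem (symbol_ref "x")) is rewritten as
	 (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
	 and the address of the HIGH part is the split point, so the high
	 bits can be loaded by a separate insn.  */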
492038fd1498Szrj 
492138fd1498Szrj       /* If we have a PLUS whose second operand is a constant and the
492238fd1498Szrj 	 address is not valid, perhaps we can split it up using
492338fd1498Szrj 	 the machine-specific way to split large constants.  We use
492438fd1498Szrj 	 the first pseudo-reg (one of the virtual regs) as a placeholder;
492538fd1498Szrj 	 it will not remain in the result.  */
492638fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == PLUS
492738fd1498Szrj 	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
492838fd1498Szrj 	  && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
492938fd1498Szrj 					    MEM_ADDR_SPACE (x)))
493038fd1498Szrj 	{
493138fd1498Szrj 	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
493238fd1498Szrj 	  rtx_insn *seq = combine_split_insns (gen_rtx_SET (reg, XEXP (x, 0)),
493338fd1498Szrj 					       subst_insn);
493438fd1498Szrj 
493538fd1498Szrj 	  /* This should have produced two insns, each of which sets our
493638fd1498Szrj 	     placeholder.  If the source of the second is a valid address,
493738fd1498Szrj 	     we can put both sources together and make a split point
493838fd1498Szrj 	     in the middle.  */
493938fd1498Szrj 
494038fd1498Szrj 	  if (seq
494138fd1498Szrj 	      && NEXT_INSN (seq) != NULL_RTX
494238fd1498Szrj 	      && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
494338fd1498Szrj 	      && NONJUMP_INSN_P (seq)
494438fd1498Szrj 	      && GET_CODE (PATTERN (seq)) == SET
494538fd1498Szrj 	      && SET_DEST (PATTERN (seq)) == reg
494638fd1498Szrj 	      && ! reg_mentioned_p (reg,
494738fd1498Szrj 				    SET_SRC (PATTERN (seq)))
494838fd1498Szrj 	      && NONJUMP_INSN_P (NEXT_INSN (seq))
494938fd1498Szrj 	      && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
495038fd1498Szrj 	      && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
495138fd1498Szrj 	      && memory_address_addr_space_p
495238fd1498Szrj 		   (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
495338fd1498Szrj 		    MEM_ADDR_SPACE (x)))
495438fd1498Szrj 	    {
495538fd1498Szrj 	      rtx src1 = SET_SRC (PATTERN (seq));
495638fd1498Szrj 	      rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
495738fd1498Szrj 
495838fd1498Szrj 	      /* Replace the placeholder in SRC2 with SRC1.  If we can
495938fd1498Szrj 		 find where in SRC2 it was placed, that can become our
496038fd1498Szrj 		 split point and we can replace this address with SRC2.
496138fd1498Szrj 		 Just try two obvious places.  */
496238fd1498Szrj 
496338fd1498Szrj 	      src2 = replace_rtx (src2, reg, src1);
496438fd1498Szrj 	      split = 0;
496538fd1498Szrj 	      if (XEXP (src2, 0) == src1)
496638fd1498Szrj 		split = &XEXP (src2, 0);
496738fd1498Szrj 	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
496838fd1498Szrj 		       && XEXP (XEXP (src2, 0), 0) == src1)
496938fd1498Szrj 		split = &XEXP (XEXP (src2, 0), 0);
497038fd1498Szrj 
497138fd1498Szrj 	      if (split)
497238fd1498Szrj 		{
497338fd1498Szrj 		  SUBST (XEXP (x, 0), src2);
497438fd1498Szrj 		  return split;
497538fd1498Szrj 		}
497638fd1498Szrj 	    }
497738fd1498Szrj 
497838fd1498Szrj 	  /* If that didn't work, perhaps the first operand is complex and
497938fd1498Szrj 	     needs to be computed separately, so make a split point there.
498038fd1498Szrj 	     This will occur on machines that just support REG + CONST
498138fd1498Szrj 	     and have a constant moved through some previous computation.  */
498238fd1498Szrj 
498338fd1498Szrj 	  else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
498438fd1498Szrj 		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
498538fd1498Szrj 			 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
498638fd1498Szrj 	    return &XEXP (XEXP (x, 0), 0);
498738fd1498Szrj 	}
498838fd1498Szrj 
498938fd1498Szrj       /* If we have a PLUS whose first operand is complex, try computing it
499038fd1498Szrj          separately by making a split there.  */
499138fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == PLUS
499238fd1498Szrj           && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
499338fd1498Szrj 					    MEM_ADDR_SPACE (x))
499438fd1498Szrj           && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
499538fd1498Szrj           && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
499638fd1498Szrj                 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
499738fd1498Szrj         return &XEXP (XEXP (x, 0), 0);
499838fd1498Szrj       break;
499938fd1498Szrj 
500038fd1498Szrj     case SET:
500138fd1498Szrj       /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
500238fd1498Szrj 	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
500338fd1498Szrj 	 we need to put the operand into a register.  So split at that
500438fd1498Szrj 	 point.  */
500538fd1498Szrj 
500638fd1498Szrj       if (SET_DEST (x) == cc0_rtx
500738fd1498Szrj 	  && GET_CODE (SET_SRC (x)) != COMPARE
500838fd1498Szrj 	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
500938fd1498Szrj 	  && !OBJECT_P (SET_SRC (x))
501038fd1498Szrj 	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
501138fd1498Szrj 		&& OBJECT_P (SUBREG_REG (SET_SRC (x)))))
501238fd1498Szrj 	return &SET_SRC (x);
501338fd1498Szrj 
501438fd1498Szrj       /* See if we can split SET_SRC as it stands.  */
501538fd1498Szrj       split = find_split_point (&SET_SRC (x), insn, true);
501638fd1498Szrj       if (split && split != &SET_SRC (x))
501738fd1498Szrj 	return split;
501838fd1498Szrj 
501938fd1498Szrj       /* See if we can split SET_DEST as it stands.  */
502038fd1498Szrj       split = find_split_point (&SET_DEST (x), insn, false);
502138fd1498Szrj       if (split && split != &SET_DEST (x))
502238fd1498Szrj 	return split;
502338fd1498Szrj 
502438fd1498Szrj       /* See if this is a bitfield assignment with everything constant.  If
502538fd1498Szrj 	 so, this is an IOR of an AND, so split it into that.  */
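      /* E.g. (set (zero_extract (reg X) (const_int 3) (const_int 4))
		   (const_int 5))
	 becomes, with BITS_BIG_ENDIAN clear,
	 (set (reg X) (ior (and (reg X) (const_int -113)) (const_int 80))):
	 ~(7 << 4) == -113 and 5 << 4 == 80.  (Values illustrative.)  */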
502638fd1498Szrj       if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
502738fd1498Szrj 	  && is_a <scalar_int_mode> (GET_MODE (XEXP (SET_DEST (x), 0)),
502838fd1498Szrj 				     &inner_mode)
502938fd1498Szrj 	  && HWI_COMPUTABLE_MODE_P (inner_mode)
503038fd1498Szrj 	  && CONST_INT_P (XEXP (SET_DEST (x), 1))
503138fd1498Szrj 	  && CONST_INT_P (XEXP (SET_DEST (x), 2))
503238fd1498Szrj 	  && CONST_INT_P (SET_SRC (x))
503338fd1498Szrj 	  && ((INTVAL (XEXP (SET_DEST (x), 1))
503438fd1498Szrj 	       + INTVAL (XEXP (SET_DEST (x), 2)))
503538fd1498Szrj 	      <= GET_MODE_PRECISION (inner_mode))
503638fd1498Szrj 	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
503738fd1498Szrj 	{
503838fd1498Szrj 	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
503938fd1498Szrj 	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
504038fd1498Szrj 	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
504138fd1498Szrj 	  rtx dest = XEXP (SET_DEST (x), 0);
504238fd1498Szrj 	  unsigned HOST_WIDE_INT mask
504338fd1498Szrj 	    = (HOST_WIDE_INT_1U << len) - 1;
504438fd1498Szrj 	  rtx or_mask;
504538fd1498Szrj 
504638fd1498Szrj 	  if (BITS_BIG_ENDIAN)
504738fd1498Szrj 	    pos = GET_MODE_PRECISION (inner_mode) - len - pos;
504838fd1498Szrj 
504938fd1498Szrj 	  or_mask = gen_int_mode (src << pos, inner_mode);
505038fd1498Szrj 	  if (src == mask)
505138fd1498Szrj 	    SUBST (SET_SRC (x),
505238fd1498Szrj 		   simplify_gen_binary (IOR, inner_mode, dest, or_mask));
505338fd1498Szrj 	  else
505438fd1498Szrj 	    {
505538fd1498Szrj 	      rtx negmask = gen_int_mode (~(mask << pos), inner_mode);
505638fd1498Szrj 	      SUBST (SET_SRC (x),
505738fd1498Szrj 		     simplify_gen_binary (IOR, inner_mode,
505838fd1498Szrj 					  simplify_gen_binary (AND, inner_mode,
505938fd1498Szrj 							       dest, negmask),
506038fd1498Szrj 					  or_mask));
506138fd1498Szrj 	    }
506238fd1498Szrj 
506338fd1498Szrj 	  SUBST (SET_DEST (x), dest);
506438fd1498Szrj 
506538fd1498Szrj 	  split = find_split_point (&SET_SRC (x), insn, true);
506638fd1498Szrj 	  if (split && split != &SET_SRC (x))
506738fd1498Szrj 	    return split;
506838fd1498Szrj 	}
506938fd1498Szrj 
507038fd1498Szrj       /* Otherwise, see if this is an operation that we can split into two.
507138fd1498Szrj 	 If so, try to split that.  */
507238fd1498Szrj       code = GET_CODE (SET_SRC (x));
507338fd1498Szrj 
507438fd1498Szrj       switch (code)
507538fd1498Szrj 	{
507638fd1498Szrj 	case AND:
507738fd1498Szrj 	  /* If we are AND'ing with a large constant that is only a single
507838fd1498Szrj 	     bit and the result is only being used in a context where we
507938fd1498Szrj 	     need to know if it is zero or nonzero, replace it with a bit
508038fd1498Szrj 	     extraction.  This will avoid the large constant, which might
508138fd1498Szrj 	     have taken more than one insn to make.  If the constant were
508238fd1498Szrj 	     not a valid argument to the AND but took only one insn to make,
508338fd1498Szrj 	     this is no worse, but if it took more than one insn, it will
508438fd1498Szrj 	     be better.  */
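	  /* E.g. (set (reg Y) (and (reg X) (const_int 0x1000))) where
	     (reg Y) is used only in (ne (reg Y) (const_int 0)) becomes a
	     one-bit extraction of bit 12 of (reg X), since
	     exact_log2 (0x1000) == 12 >= 7.  (Values illustrative.)  */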
508538fd1498Szrj 
508638fd1498Szrj 	  if (CONST_INT_P (XEXP (SET_SRC (x), 1))
508738fd1498Szrj 	      && REG_P (XEXP (SET_SRC (x), 0))
508838fd1498Szrj 	      && (pos = exact_log2 (UINTVAL (XEXP (SET_SRC (x), 1)))) >= 7
508938fd1498Szrj 	      && REG_P (SET_DEST (x))
509038fd1498Szrj 	      && (split = find_single_use (SET_DEST (x), insn, NULL)) != 0
509138fd1498Szrj 	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
509238fd1498Szrj 	      && XEXP (*split, 0) == SET_DEST (x)
509338fd1498Szrj 	      && XEXP (*split, 1) == const0_rtx)
509438fd1498Szrj 	    {
509538fd1498Szrj 	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
509638fd1498Szrj 						XEXP (SET_SRC (x), 0),
509738fd1498Szrj 						pos, NULL_RTX, 1, 1, 0, 0);
509838fd1498Szrj 	      if (extraction != 0)
509938fd1498Szrj 		{
510038fd1498Szrj 		  SUBST (SET_SRC (x), extraction);
510138fd1498Szrj 		  return find_split_point (loc, insn, false);
510238fd1498Szrj 		}
510338fd1498Szrj 	    }
510438fd1498Szrj 	  break;
510538fd1498Szrj 
510638fd1498Szrj 	case NE:
510738fd1498Szrj 	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
510838fd1498Szrj 	     is known to be on, this can be converted into a NEG of a shift.  */
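	  /* E.g. if only bit 3 of X can be nonzero, (ne X (const_int 0))
	     becomes (neg (lshiftrt X (const_int 3))): the shift leaves
	     0 or 1, which the NEG turns into 0 or -1, matching
	     STORE_FLAG_VALUE == -1.  (Bit position illustrative.)  */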
510938fd1498Szrj 	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
511038fd1498Szrj 	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
511138fd1498Szrj 	      && ((pos = exact_log2 (nonzero_bits (XEXP (SET_SRC (x), 0),
511238fd1498Szrj 						   GET_MODE (XEXP (SET_SRC (x),
511338fd1498Szrj 							     0))))) >= 1))
511438fd1498Szrj 	    {
511538fd1498Szrj 	      machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
511638fd1498Szrj 	      rtx pos_rtx = gen_int_shift_amount (mode, pos);
511738fd1498Szrj 	      SUBST (SET_SRC (x),
511838fd1498Szrj 		     gen_rtx_NEG (mode,
511938fd1498Szrj 				  gen_rtx_LSHIFTRT (mode,
512038fd1498Szrj 						    XEXP (SET_SRC (x), 0),
512138fd1498Szrj 						    pos_rtx)));
512238fd1498Szrj 
512338fd1498Szrj 	      split = find_split_point (&SET_SRC (x), insn, true);
512438fd1498Szrj 	      if (split && split != &SET_SRC (x))
512538fd1498Szrj 		return split;
512638fd1498Szrj 	    }
512738fd1498Szrj 	  break;
512838fd1498Szrj 
512938fd1498Szrj 	case SIGN_EXTEND:
513038fd1498Szrj 	  inner = XEXP (SET_SRC (x), 0);
513138fd1498Szrj 
513238fd1498Szrj 	  /* We can't optimize if either mode is a partial integer
513338fd1498Szrj 	     mode as we don't know how many bits are significant
513438fd1498Szrj 	     in those modes.  */
513538fd1498Szrj 	  if (!is_int_mode (GET_MODE (inner), &inner_mode)
513638fd1498Szrj 	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
513738fd1498Szrj 	    break;
513838fd1498Szrj 
513938fd1498Szrj 	  pos = 0;
514038fd1498Szrj 	  len = GET_MODE_PRECISION (inner_mode);
514138fd1498Szrj 	  unsignedp = 0;
514238fd1498Szrj 	  break;
514338fd1498Szrj 
514438fd1498Szrj 	case SIGN_EXTRACT:
514538fd1498Szrj 	case ZERO_EXTRACT:
514638fd1498Szrj 	  if (is_a <scalar_int_mode> (GET_MODE (XEXP (SET_SRC (x), 0)),
514738fd1498Szrj 				      &inner_mode)
514838fd1498Szrj 	      && CONST_INT_P (XEXP (SET_SRC (x), 1))
514938fd1498Szrj 	      && CONST_INT_P (XEXP (SET_SRC (x), 2)))
515038fd1498Szrj 	    {
515138fd1498Szrj 	      inner = XEXP (SET_SRC (x), 0);
515238fd1498Szrj 	      len = INTVAL (XEXP (SET_SRC (x), 1));
515338fd1498Szrj 	      pos = INTVAL (XEXP (SET_SRC (x), 2));
515438fd1498Szrj 
515538fd1498Szrj 	      if (BITS_BIG_ENDIAN)
515638fd1498Szrj 		pos = GET_MODE_PRECISION (inner_mode) - len - pos;
515738fd1498Szrj 	      unsignedp = (code == ZERO_EXTRACT);
515838fd1498Szrj 	    }
515938fd1498Szrj 	  break;
516038fd1498Szrj 
516138fd1498Szrj 	default:
516238fd1498Szrj 	  break;
516338fd1498Szrj 	}
516438fd1498Szrj 
516538fd1498Szrj       if (len
516638fd1498Szrj 	  && known_subrange_p (pos, len,
516738fd1498Szrj 			       0, GET_MODE_PRECISION (GET_MODE (inner)))
516838fd1498Szrj 	  && is_a <scalar_int_mode> (GET_MODE (SET_SRC (x)), &mode))
516938fd1498Szrj 	{
517038fd1498Szrj 	  /* For unsigned, we have a choice of a shift followed by an
517138fd1498Szrj 	     AND or two shifts.  Use two shifts for field sizes where the
517238fd1498Szrj 	     constant might be too large.  We assume here that we can
517338fd1498Szrj 	     always at least get 8-bit constants in an AND insn, which is
517438fd1498Szrj 	     true for every current RISC.  */
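	  /* E.g. a 4-bit unsigned field at bit 8 becomes
	     (and (lshiftrt (reg X) (const_int 8)) (const_int 15)),
	     while a 20-bit field takes the two-shift form below to avoid
	     a large AND mask.  (Widths illustrative.)  */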
517538fd1498Szrj 
517638fd1498Szrj 	  if (unsignedp && len <= 8)
517738fd1498Szrj 	    {
517838fd1498Szrj 	      unsigned HOST_WIDE_INT mask
517938fd1498Szrj 		= (HOST_WIDE_INT_1U << len) - 1;
518038fd1498Szrj 	      rtx pos_rtx = gen_int_shift_amount (mode, pos);
518138fd1498Szrj 	      SUBST (SET_SRC (x),
518238fd1498Szrj 		     gen_rtx_AND (mode,
518338fd1498Szrj 				  gen_rtx_LSHIFTRT
518438fd1498Szrj 				  (mode, gen_lowpart (mode, inner), pos_rtx),
518538fd1498Szrj 				  gen_int_mode (mask, mode)));
518638fd1498Szrj 
518738fd1498Szrj 	      split = find_split_point (&SET_SRC (x), insn, true);
518838fd1498Szrj 	      if (split && split != &SET_SRC (x))
518938fd1498Szrj 		return split;
519038fd1498Szrj 	    }
519138fd1498Szrj 	  else
519238fd1498Szrj 	    {
519338fd1498Szrj 	      int left_bits = GET_MODE_PRECISION (mode) - len - pos;
519438fd1498Szrj 	      int right_bits = GET_MODE_PRECISION (mode) - len;
519538fd1498Szrj 	      SUBST (SET_SRC (x),
519638fd1498Szrj 		     gen_rtx_fmt_ee
519738fd1498Szrj 		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
519838fd1498Szrj 		      gen_rtx_ASHIFT (mode,
519938fd1498Szrj 				      gen_lowpart (mode, inner),
520038fd1498Szrj 				      gen_int_shift_amount (mode, left_bits)),
520138fd1498Szrj 		      gen_int_shift_amount (mode, right_bits)));
520238fd1498Szrj 
520338fd1498Szrj 	      split = find_split_point (&SET_SRC (x), insn, true);
520438fd1498Szrj 	      if (split && split != &SET_SRC (x))
520538fd1498Szrj 		return split;
520638fd1498Szrj 	    }
520738fd1498Szrj 	}
520838fd1498Szrj 
520938fd1498Szrj       /* See if this is a simple operation with a constant as the second
521038fd1498Szrj 	 operand.  It might be that this constant is out of range and hence
521138fd1498Szrj 	 could be used as a split point.  */
521238fd1498Szrj       if (BINARY_P (SET_SRC (x))
521338fd1498Szrj 	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
521438fd1498Szrj 	  && (OBJECT_P (XEXP (SET_SRC (x), 0))
521538fd1498Szrj 	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
521638fd1498Szrj 		  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
521738fd1498Szrj 	return &XEXP (SET_SRC (x), 1);
521838fd1498Szrj 
521938fd1498Szrj       /* Finally, see if this is a simple operation with its first operand
522038fd1498Szrj 	 not in a register.  The operation might require this operand in a
522138fd1498Szrj 	 register, so return it as a split point.  We can always do this
522238fd1498Szrj 	 because if the first operand were another operation, we would have
522338fd1498Szrj 	 already found it as a split point.  */
522438fd1498Szrj       if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
522538fd1498Szrj 	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
522638fd1498Szrj 	return &XEXP (SET_SRC (x), 0);
522738fd1498Szrj 
522838fd1498Szrj       return 0;
522938fd1498Szrj 
523038fd1498Szrj     case AND:
523138fd1498Szrj     case IOR:
523238fd1498Szrj       /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
523338fd1498Szrj 	 it is better to write this as (not (ior A B)) so we can split it.
523438fd1498Szrj 	 Similarly for IOR.  */
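      /* E.g. (and (not (reg A)) (not (reg B))) becomes
	 (not (ior (reg A) (reg B))), and the IOR form likewise becomes
	 (not (and (reg A) (reg B))).  */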
523538fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
523638fd1498Szrj 	{
523738fd1498Szrj 	  SUBST (*loc,
523838fd1498Szrj 		 gen_rtx_NOT (GET_MODE (x),
523938fd1498Szrj 			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
524038fd1498Szrj 					      GET_MODE (x),
524138fd1498Szrj 					      XEXP (XEXP (x, 0), 0),
524238fd1498Szrj 					      XEXP (XEXP (x, 1), 0))));
524338fd1498Szrj 	  return find_split_point (loc, insn, set_src);
524438fd1498Szrj 	}
524538fd1498Szrj 
524638fd1498Szrj       /* Many RISC machines have a large set of logical insns.  If the
524738fd1498Szrj 	 second operand is a NOT, put it first so we will try to split the
524838fd1498Szrj 	 other operand first.  */
524938fd1498Szrj       if (GET_CODE (XEXP (x, 1)) == NOT)
525038fd1498Szrj 	{
525138fd1498Szrj 	  rtx tem = XEXP (x, 0);
525238fd1498Szrj 	  SUBST (XEXP (x, 0), XEXP (x, 1));
525338fd1498Szrj 	  SUBST (XEXP (x, 1), tem);
525438fd1498Szrj 	}
525538fd1498Szrj       break;
525638fd1498Szrj 
525738fd1498Szrj     case PLUS:
525838fd1498Szrj     case MINUS:
525938fd1498Szrj       /* Canonicalization can produce (minus A (mult B C)), where C is a
526038fd1498Szrj 	 constant.  It may be better to try splitting (plus (mult B -C) A)
526138fd1498Szrj 	 instead if this isn't a multiply by a power of two.  */
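      /* E.g. (minus (reg A) (mult (reg B) (const_int 3))) is retried as
	 (plus (mult (reg B) (const_int -3)) (reg A)); a power-of-two
	 multiplier would become a cheap shift and is left alone.  */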
526238fd1498Szrj       if (set_src && code == MINUS && GET_CODE (XEXP (x, 1)) == MULT
526338fd1498Szrj 	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
526438fd1498Szrj 	  && !pow2p_hwi (INTVAL (XEXP (XEXP (x, 1), 1))))
526538fd1498Szrj 	{
526638fd1498Szrj 	  machine_mode mode = GET_MODE (x);
526738fd1498Szrj 	  unsigned HOST_WIDE_INT this_int = INTVAL (XEXP (XEXP (x, 1), 1));
526838fd1498Szrj 	  HOST_WIDE_INT other_int = trunc_int_for_mode (-this_int, mode);
526938fd1498Szrj 	  SUBST (*loc, gen_rtx_PLUS (mode,
527038fd1498Szrj 				     gen_rtx_MULT (mode,
527138fd1498Szrj 						   XEXP (XEXP (x, 1), 0),
527238fd1498Szrj 						   gen_int_mode (other_int,
527338fd1498Szrj 								 mode)),
527438fd1498Szrj 				     XEXP (x, 0)));
527538fd1498Szrj 	  return find_split_point (loc, insn, set_src);
527638fd1498Szrj 	}
527738fd1498Szrj 
527838fd1498Szrj       /* Split at a multiply-accumulate instruction.  However if this is
527938fd1498Szrj          the SET_SRC, we likely do not have such an instruction and it's
528038fd1498Szrj          worthless to try this split.  */
528138fd1498Szrj       if (!set_src
528238fd1498Szrj 	  && (GET_CODE (XEXP (x, 0)) == MULT
528338fd1498Szrj 	      || (GET_CODE (XEXP (x, 0)) == ASHIFT
528438fd1498Szrj 		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
528538fd1498Szrj         return loc;
528638fd1498Szrj 
528738fd1498Szrj     default:
528838fd1498Szrj       break;
528938fd1498Szrj     }
529038fd1498Szrj 
529138fd1498Szrj   /* Otherwise, select our actions depending on our rtx class.  */
529238fd1498Szrj   switch (GET_RTX_CLASS (code))
529338fd1498Szrj     {
529438fd1498Szrj     case RTX_BITFIELD_OPS:		/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
529538fd1498Szrj     case RTX_TERNARY:
529638fd1498Szrj       split = find_split_point (&XEXP (x, 2), insn, false);
529738fd1498Szrj       if (split)
529838fd1498Szrj 	return split;
529938fd1498Szrj       /* fall through */
530038fd1498Szrj     case RTX_BIN_ARITH:
530138fd1498Szrj     case RTX_COMM_ARITH:
530238fd1498Szrj     case RTX_COMPARE:
530338fd1498Szrj     case RTX_COMM_COMPARE:
530438fd1498Szrj       split = find_split_point (&XEXP (x, 1), insn, false);
530538fd1498Szrj       if (split)
530638fd1498Szrj 	return split;
530738fd1498Szrj       /* fall through */
530838fd1498Szrj     case RTX_UNARY:
530938fd1498Szrj       /* Some machines have (and (shift ...) ...) insns.  If X is not
531038fd1498Szrj 	 an AND, but XEXP (X, 0) is, use it as our split point.  */
531138fd1498Szrj       if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
531238fd1498Szrj 	return &XEXP (x, 0);
531338fd1498Szrj 
531438fd1498Szrj       split = find_split_point (&XEXP (x, 0), insn, false);
531538fd1498Szrj       if (split)
531638fd1498Szrj 	return split;
531738fd1498Szrj       return loc;
531838fd1498Szrj 
531938fd1498Szrj     default:
532038fd1498Szrj       /* Otherwise, we don't have a split point.  */
532138fd1498Szrj       return 0;
532238fd1498Szrj     }
532338fd1498Szrj }
532438fd1498Szrj 
532538fd1498Szrj /* Throughout X, replace FROM with TO, and return the result.
532638fd1498Szrj    The result is TO if X is FROM;
532738fd1498Szrj    otherwise the result is X, but its contents may have been modified.
532838fd1498Szrj    If they were modified, a record was made in undobuf so that
532938fd1498Szrj    undo_all will (among other things) return X to its original state.
533038fd1498Szrj 
533138fd1498Szrj    If the number of changes necessary is too much to record to undo,
533238fd1498Szrj    the excess changes are not made, so the result is invalid.
533338fd1498Szrj    The changes already made can still be undone.
533438fd1498Szrj    undobuf.num_undo is incremented for such changes, so by testing that,
533538fd1498Szrj    the caller can tell whether the result is valid.
533638fd1498Szrj 
533738fd1498Szrj    `n_occurrences' is incremented each time FROM is replaced.
533838fd1498Szrj 
533938fd1498Szrj    IN_DEST is nonzero if we are processing the SET_DEST of a SET.
534038fd1498Szrj 
534138fd1498Szrj    IN_COND is nonzero if we are at the top level of a condition.
534238fd1498Szrj 
534338fd1498Szrj    UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
534438fd1498Szrj    by copying if `n_occurrences' is nonzero.  */
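/* Note that a failed substitution is signaled by returning
   (clobber (const_int 0)), not a null pointer; the recursive calls
   below test for exactly that pattern.  */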
534538fd1498Szrj 
534638fd1498Szrj static rtx
534738fd1498Szrj subst (rtx x, rtx from, rtx to, int in_dest, int in_cond, int unique_copy)
534838fd1498Szrj {
534938fd1498Szrj   enum rtx_code code = GET_CODE (x);
535038fd1498Szrj   machine_mode op0_mode = VOIDmode;
535138fd1498Szrj   const char *fmt;
535238fd1498Szrj   int len, i;
535338fd1498Szrj   rtx new_rtx;
535438fd1498Szrj 
535538fd1498Szrj /* Two expressions are equal if they are identical copies of a shared
535638fd1498Szrj    RTX or if they are both registers with the same register number
535738fd1498Szrj    and mode.  */
535838fd1498Szrj 
535938fd1498Szrj #define COMBINE_RTX_EQUAL_P(X,Y)			\
536038fd1498Szrj   ((X) == (Y)						\
536138fd1498Szrj    || (REG_P (X) && REG_P (Y)	\
536238fd1498Szrj        && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
536338fd1498Szrj 
536438fd1498Szrj   /* Do not substitute into clobbers of regs -- this will never result in
536538fd1498Szrj      valid RTL.  */
536638fd1498Szrj   if (GET_CODE (x) == CLOBBER && REG_P (XEXP (x, 0)))
536738fd1498Szrj     return x;
536838fd1498Szrj 
536938fd1498Szrj   if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
537038fd1498Szrj     {
537138fd1498Szrj       n_occurrences++;
537238fd1498Szrj       return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
537338fd1498Szrj     }
537438fd1498Szrj 
537538fd1498Szrj   /* If X and FROM are the same register but different modes, they
537638fd1498Szrj      will not have been seen as equal above.  However, the log links code
537738fd1498Szrj      will make a LOG_LINKS entry for that case.  If we do nothing, we
537838fd1498Szrj      will try to rerecognize our original insn and, when it succeeds,
537938fd1498Szrj      we will delete the feeding insn, which is incorrect.
538038fd1498Szrj 
538138fd1498Szrj      So force this insn not to match in this (rare) case.  */
538238fd1498Szrj   if (! in_dest && code == REG && REG_P (from)
538338fd1498Szrj       && reg_overlap_mentioned_p (x, from))
538438fd1498Szrj     return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
538538fd1498Szrj 
538638fd1498Szrj   /* If this is an object, we are done unless it is a MEM or LO_SUM, both
538738fd1498Szrj      of which may contain things that can be combined.  */
538838fd1498Szrj   if (code != MEM && code != LO_SUM && OBJECT_P (x))
538938fd1498Szrj     return x;
539038fd1498Szrj 
539138fd1498Szrj   /* It is possible to have a subexpression appear twice in the insn.
539238fd1498Szrj      Suppose that FROM is a register that appears within TO.
539338fd1498Szrj      Then, after that subexpression has been scanned once by `subst',
539438fd1498Szrj      the second time it is scanned, TO may be found.  If we were
539538fd1498Szrj      to scan TO here, we would find FROM within it and create a
539638fd1498Szrj      self-referent rtl structure which is completely wrong.  */
539738fd1498Szrj   if (COMBINE_RTX_EQUAL_P (x, to))
539838fd1498Szrj     return to;
539938fd1498Szrj 
540038fd1498Szrj   /* Parallel asm_operands need special attention because all of the
540138fd1498Szrj      inputs are shared across the arms.  Furthermore, unsharing the
540238fd1498Szrj      rtl results in recognition failures.  Failure to handle this case
540338fd1498Szrj      specially can result in circular rtl.
540438fd1498Szrj 
540538fd1498Szrj      Solve this by doing a normal pass across the first entry of the
540638fd1498Szrj      parallel, and only processing the SET_DESTs of the subsequent
540738fd1498Szrj      entries.  Ug.  */
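  /* Such an insn looks like (illustrative shape):
     (parallel [(set (reg A) (asm_operands ...))
		(set (reg B) (asm_operands ...))])
     with one shared ASM_OPERANDS input vector, hence the asymmetric
     treatment of the first arm versus the rest.  */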
540838fd1498Szrj 
540938fd1498Szrj   if (code == PARALLEL
541038fd1498Szrj       && GET_CODE (XVECEXP (x, 0, 0)) == SET
541138fd1498Szrj       && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
541238fd1498Szrj     {
541338fd1498Szrj       new_rtx = subst (XVECEXP (x, 0, 0), from, to, 0, 0, unique_copy);
541438fd1498Szrj 
541538fd1498Szrj       /* If this substitution failed, this whole thing fails.  */
541638fd1498Szrj       if (GET_CODE (new_rtx) == CLOBBER
541738fd1498Szrj 	  && XEXP (new_rtx, 0) == const0_rtx)
541838fd1498Szrj 	return new_rtx;
541938fd1498Szrj 
542038fd1498Szrj       SUBST (XVECEXP (x, 0, 0), new_rtx);
542138fd1498Szrj 
542238fd1498Szrj       for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
542338fd1498Szrj 	{
542438fd1498Szrj 	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
542538fd1498Szrj 
542638fd1498Szrj 	  if (!REG_P (dest)
542738fd1498Szrj 	      && GET_CODE (dest) != CC0
542838fd1498Szrj 	      && GET_CODE (dest) != PC)
542938fd1498Szrj 	    {
543038fd1498Szrj 	      new_rtx = subst (dest, from, to, 0, 0, unique_copy);
543138fd1498Szrj 
543238fd1498Szrj 	      /* If this substitution failed, this whole thing fails.  */
543338fd1498Szrj 	      if (GET_CODE (new_rtx) == CLOBBER
543438fd1498Szrj 		  && XEXP (new_rtx, 0) == const0_rtx)
543538fd1498Szrj 		return new_rtx;
543638fd1498Szrj 
543738fd1498Szrj 	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
543838fd1498Szrj 	    }
543938fd1498Szrj 	}
544038fd1498Szrj     }
544138fd1498Szrj   else
544238fd1498Szrj     {
544338fd1498Szrj       len = GET_RTX_LENGTH (code);
544438fd1498Szrj       fmt = GET_RTX_FORMAT (code);
544538fd1498Szrj 
544638fd1498Szrj       /* We don't need to process a SET_DEST that is a register, CC0,
544738fd1498Szrj 	 or PC, so set up to skip this common case.  All other cases
544838fd1498Szrj 	 where we want to suppress replacing something inside a
544938fd1498Szrj 	 SET_SRC are handled via the IN_DEST operand.  */
545038fd1498Szrj       if (code == SET
545138fd1498Szrj 	  && (REG_P (SET_DEST (x))
545238fd1498Szrj 	      || GET_CODE (SET_DEST (x)) == CC0
545338fd1498Szrj 	      || GET_CODE (SET_DEST (x)) == PC))
545438fd1498Szrj 	fmt = "ie";
545538fd1498Szrj 
545638fd1498Szrj       /* Trying to simplify the operands of a widening MULT is not likely
545738fd1498Szrj 	 to create RTL matching a machine insn.  */
545838fd1498Szrj       if (code == MULT
545938fd1498Szrj 	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
546038fd1498Szrj 	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
546138fd1498Szrj 	  && (GET_CODE (XEXP (x, 1)) == ZERO_EXTEND
546238fd1498Szrj 	      || GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
546338fd1498Szrj 	  && REG_P (XEXP (XEXP (x, 0), 0))
546438fd1498Szrj 	  && REG_P (XEXP (XEXP (x, 1), 0))
546538fd1498Szrj 	  && from == to)
546638fd1498Szrj 	return x;
546738fd1498Szrj 
546838fd1498Szrj 
546938fd1498Szrj       /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
547038fd1498Szrj 	 constant.  */
547138fd1498Szrj       if (fmt[0] == 'e')
547238fd1498Szrj 	op0_mode = GET_MODE (XEXP (x, 0));
547338fd1498Szrj 
547438fd1498Szrj       for (i = 0; i < len; i++)
547538fd1498Szrj 	{
547638fd1498Szrj 	  if (fmt[i] == 'E')
547738fd1498Szrj 	    {
547838fd1498Szrj 	      int j;
547938fd1498Szrj 	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
548038fd1498Szrj 		{
548138fd1498Szrj 		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
548238fd1498Szrj 		    {
548338fd1498Szrj 		      new_rtx = (unique_copy && n_occurrences
548438fd1498Szrj 			     ? copy_rtx (to) : to);
548538fd1498Szrj 		      n_occurrences++;
548638fd1498Szrj 		    }
548738fd1498Szrj 		  else
548838fd1498Szrj 		    {
548938fd1498Szrj 		      new_rtx = subst (XVECEXP (x, i, j), from, to, 0, 0,
549038fd1498Szrj 				       unique_copy);
549138fd1498Szrj 
549238fd1498Szrj 		      /* If this substitution failed, this whole thing
549338fd1498Szrj 			 fails.  */
549438fd1498Szrj 		      if (GET_CODE (new_rtx) == CLOBBER
549538fd1498Szrj 			  && XEXP (new_rtx, 0) == const0_rtx)
549638fd1498Szrj 			return new_rtx;
549738fd1498Szrj 		    }
549838fd1498Szrj 
549938fd1498Szrj 		  SUBST (XVECEXP (x, i, j), new_rtx);
550038fd1498Szrj 		}
550138fd1498Szrj 	    }
550238fd1498Szrj 	  else if (fmt[i] == 'e')
550338fd1498Szrj 	    {
550438fd1498Szrj 	      /* If this is a register being set, ignore it.  */
550538fd1498Szrj 	      new_rtx = XEXP (x, i);
550638fd1498Szrj 	      if (in_dest
550738fd1498Szrj 		  && i == 0
550838fd1498Szrj 		  && (((code == SUBREG || code == ZERO_EXTRACT)
550938fd1498Szrj 		       && REG_P (new_rtx))
551038fd1498Szrj 		      || code == STRICT_LOW_PART))
551138fd1498Szrj 		;
551238fd1498Szrj 
551338fd1498Szrj 	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
551438fd1498Szrj 		{
551538fd1498Szrj 		  /* In general, don't install a subreg involving two
551638fd1498Szrj 		     modes not tieable.  It can worsen register
551738fd1498Szrj 		     allocation, and can even make invalid reload
551838fd1498Szrj 		     insns, since the reg inside may need to be copied
551938fd1498Szrj 		     from in the outside mode, and that may be invalid
552038fd1498Szrj 		     if it is an fp reg copied in integer mode.
552138fd1498Szrj 
552238fd1498Szrj 		     We allow two exceptions to this: It is valid if
552338fd1498Szrj 		     it is inside another SUBREG and the mode of that
552438fd1498Szrj 		     SUBREG and the mode of the inside of TO is
552538fd1498Szrj 		     tieable and it is valid if X is a SET that copies
552638fd1498Szrj 		     FROM to CC0.  */
552738fd1498Szrj 
552838fd1498Szrj 		  if (GET_CODE (to) == SUBREG
552938fd1498Szrj 		      && !targetm.modes_tieable_p (GET_MODE (to),
553038fd1498Szrj 						   GET_MODE (SUBREG_REG (to)))
553138fd1498Szrj 		      && ! (code == SUBREG
553238fd1498Szrj 			    && (targetm.modes_tieable_p
553338fd1498Szrj 				(GET_MODE (x), GET_MODE (SUBREG_REG (to)))))
553438fd1498Szrj 		      && (!HAVE_cc0
553538fd1498Szrj 			  || (! (code == SET
553638fd1498Szrj 				 && i == 1
553738fd1498Szrj 				 && XEXP (x, 0) == cc0_rtx))))
553838fd1498Szrj 		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
553938fd1498Szrj 
554038fd1498Szrj 		  if (code == SUBREG
554138fd1498Szrj 		      && REG_P (to)
554238fd1498Szrj 		      && REGNO (to) < FIRST_PSEUDO_REGISTER
554338fd1498Szrj 		      && simplify_subreg_regno (REGNO (to), GET_MODE (to),
554438fd1498Szrj 						SUBREG_BYTE (x),
554538fd1498Szrj 						GET_MODE (x)) < 0)
554638fd1498Szrj 		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
554738fd1498Szrj 
554838fd1498Szrj 		  new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
554938fd1498Szrj 		  n_occurrences++;
555038fd1498Szrj 		}
555138fd1498Szrj 	      else
555238fd1498Szrj 		/* If we are in a SET_DEST, suppress most cases unless we
555338fd1498Szrj 		   have gone inside a MEM, in which case we want to
555438fd1498Szrj 		   simplify the address.  We assume here that things that
555538fd1498Szrj 		   are actually part of the destination have their inner
555638fd1498Szrj 		   parts in the first expression.  This is true for SUBREG,
555738fd1498Szrj 		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
555838fd1498Szrj 		   things aside from REG and MEM that should appear in a
555938fd1498Szrj 		   SET_DEST.  */
556038fd1498Szrj 		new_rtx = subst (XEXP (x, i), from, to,
556138fd1498Szrj 			     (((in_dest
556238fd1498Szrj 				&& (code == SUBREG || code == STRICT_LOW_PART
556338fd1498Szrj 				    || code == ZERO_EXTRACT))
556438fd1498Szrj 			       || code == SET)
556538fd1498Szrj 			      && i == 0),
556638fd1498Szrj 				 code == IF_THEN_ELSE && i == 0,
556738fd1498Szrj 				 unique_copy);
556838fd1498Szrj 
556938fd1498Szrj 	      /* If we found that we will have to reject this combination,
557038fd1498Szrj 		 indicate that by returning the CLOBBER ourselves, rather than
557138fd1498Szrj 		 an expression containing it.  This will speed things up as
557238fd1498Szrj 		 well as prevent accidents where two CLOBBERs are considered
557338fd1498Szrj 		 to be equal, thus producing an incorrect simplification.  */
557438fd1498Szrj 
557538fd1498Szrj 	      if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
557638fd1498Szrj 		return new_rtx;
557738fd1498Szrj 
557838fd1498Szrj 	      if (GET_CODE (x) == SUBREG && CONST_SCALAR_INT_P (new_rtx))
557938fd1498Szrj 		{
558038fd1498Szrj 		  machine_mode mode = GET_MODE (x);
558138fd1498Szrj 
558238fd1498Szrj 		  x = simplify_subreg (GET_MODE (x), new_rtx,
558338fd1498Szrj 				       GET_MODE (SUBREG_REG (x)),
558438fd1498Szrj 				       SUBREG_BYTE (x));
558538fd1498Szrj 		  if (! x)
558638fd1498Szrj 		    x = gen_rtx_CLOBBER (mode, const0_rtx);
558738fd1498Szrj 		}
558838fd1498Szrj 	      else if (CONST_SCALAR_INT_P (new_rtx)
558938fd1498Szrj 		       && (GET_CODE (x) == ZERO_EXTEND
559038fd1498Szrj 			   || GET_CODE (x) == FLOAT
559138fd1498Szrj 			   || GET_CODE (x) == UNSIGNED_FLOAT))
559238fd1498Szrj 		{
559338fd1498Szrj 		  x = simplify_unary_operation (GET_CODE (x), GET_MODE (x),
559438fd1498Szrj 						new_rtx,
559538fd1498Szrj 						GET_MODE (XEXP (x, 0)));
559638fd1498Szrj 		  if (!x)
559738fd1498Szrj 		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
559838fd1498Szrj 		}
559938fd1498Szrj 	      else
560038fd1498Szrj 		SUBST (XEXP (x, i), new_rtx);
560138fd1498Szrj 	    }
560238fd1498Szrj 	}
560338fd1498Szrj     }
560438fd1498Szrj 
560538fd1498Szrj   /* Check if we are loading something from the constant pool via float
560638fd1498Szrj      extension; in this case we would undo the compress_float_constant
560738fd1498Szrj      optimization and degrade the constant load to an immediate value.  */
560838fd1498Szrj   if (GET_CODE (x) == FLOAT_EXTEND
560938fd1498Szrj       && MEM_P (XEXP (x, 0))
561038fd1498Szrj       && MEM_READONLY_P (XEXP (x, 0)))
561138fd1498Szrj     {
561238fd1498Szrj       rtx tmp = avoid_constant_pool_reference (x);
561338fd1498Szrj       if (x != tmp)
561438fd1498Szrj         return x;
561538fd1498Szrj     }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
	 with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
	x = combine_simplify_rtx (x, op0_mode, in_dest, in_cond);

      if (GET_CODE (x) == code)
	break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}

/* If X is a commutative operation whose operands are not in the canonical
   order, use substitutions to swap them.  */

static void
maybe_swap_commutative_operands (rtx x)
{
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      rtx temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }
}
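
/* Illustrative example (mine, not from the original sources): canonical
   order puts the more complex operand first and any constant last, so
   (plus (const_int 4) (reg)) would be rewritten here as
   (plus (reg) (const_int 4)) before any further pattern matching.  */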

/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
   if we are inside a SET_DEST.  IN_COND is nonzero if we are at the top level
   of a condition.  */

static rtx
combine_simplify_rtx (rtx x, machine_mode op0_mode, int in_dest,
		      int in_cond)
{
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);
  scalar_int_mode int_mode;
  rtx temp;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  maybe_swap_commutative_operands (x);

  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (op0_mode == VOIDmode)
	op0_mode = GET_MODE (XEXP (x, 0));
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
	machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
	if (cmp_mode == VOIDmode)
	  {
	    cmp_mode = GET_MODE (XEXP (x, 1));
	    if (cmp_mode == VOIDmode)
	      cmp_mode = op0_mode;
	  }
	temp = simplify_relational_operation (code, mode, cmp_mode,
					      XEXP (x, 0), XEXP (x, 1));
      }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case RTX_BITFIELD_OPS:
    case RTX_TERNARY:
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
					 XEXP (x, 1), XEXP (x, 2));
      break;
    default:
      break;
    }

  if (temp)
    {
      x = temp;
      code = GET_CODE (temp);
      op0_mode = VOIDmode;
      mode = GET_MODE (temp);
    }

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */

  if ((BINARY_P (x)
       && ((!OBJECT_P (XEXP (x, 0))
	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
	   || (!OBJECT_P (XEXP (x, 1))
	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
		     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
      || (UNARY_P (x)
	  && (!OBJECT_P (XEXP (x, 0))
	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		     && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
    {
      rtx cond, true_rtx, false_rtx;

      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
      if (cond != 0
	  /* If everything is a comparison, what we have is highly unlikely
	     to be simpler, so don't use it.  */
	  && ! (COMPARISON_P (x)
		&& (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx)))
	  /* Similarly, if we end up with one of the expressions the same
	     as the original, it is certainly not simpler.  */
	  && ! rtx_equal_p (x, true_rtx)
	  && ! rtx_equal_p (x, false_rtx))
	{
	  rtx cop1 = const0_rtx;
	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

	  if (cond_code == NE && COMPARISON_P (cond))
	    return x;

	  /* Simplify the alternative arms; this may collapse the true and
	     false arms to store-flag values.  Be careful to use copy_rtx
	     here since true_rtx or false_rtx might share RTL with x as a
	     result of the if_then_else_cond call above.  */
	  true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0, 0);
	  false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0, 0);

	  /* If true_rtx and false_rtx are not general_operands, an if_then_else
	     is unlikely to be simpler.  */
	  if (general_operand (true_rtx, VOIDmode)
	      && general_operand (false_rtx, VOIDmode))
	    {
	      enum rtx_code reversed;

	      /* Restarting if we generate a store-flag expression will cause
		 us to loop.  Just drop through in this case.  */

	      /* If the result values are STORE_FLAG_VALUE and zero, we can
		 just make the comparison operation.  */
	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
		x = simplify_gen_relational (cond_code, mode, VOIDmode,
					     cond, cop1);
	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
		       && ((reversed = reversed_comparison_code_parts
					(cond_code, cond, cop1, NULL))
			   != UNKNOWN))
		x = simplify_gen_relational (reversed, mode, VOIDmode,
					     cond, cop1);

	      /* Likewise, we can make the negate of a comparison operation
		 if the result values are - STORE_FLAG_VALUE and zero.  */
	      else if (CONST_INT_P (true_rtx)
		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
		       && false_rtx == const0_rtx)
		x = simplify_gen_unary (NEG, mode,
					simplify_gen_relational (cond_code,
								 mode, VOIDmode,
								 cond, cop1),
					mode);
	      else if (CONST_INT_P (false_rtx)
		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
		       && true_rtx == const0_rtx
		       && ((reversed = reversed_comparison_code_parts
					(cond_code, cond, cop1, NULL))
			   != UNKNOWN))
		x = simplify_gen_unary (NEG, mode,
					simplify_gen_relational (reversed,
								 mode, VOIDmode,
								 cond, cop1),
					mode);
	      else
		return gen_rtx_IF_THEN_ELSE (mode,
					     simplify_gen_relational (cond_code,
								      mode,
								      VOIDmode,
								      cond,
								      cop1),
					     true_rtx, false_rtx);

	      code = GET_CODE (x);
	      op0_mode = VOIDmode;
	    }
	}
    }
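  /* Illustrative sketch (mine): if X is an operation over an IF_THEN_ELSE,
     e.g. (plus (if_then_else C (reg A) (reg B)) (const_int 1)), the arms
     recovered by if_then_else_cond are (plus A 1) and (plus B 1), and the
     code above rebuilds X as an IF_THEN_ELSE over those simplified arms,
     which often collapses further when an arm becomes a store-flag
     constant.  */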

  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
      op0_mode = VOIDmode;
    }
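  /* Worked example (illustrative, not from the original sources): the
     inverse distributive law rewrites (ior (and A C) (and B C)) as
     (and (ior A B) C), factoring out the common operand C.  */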

  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (i.e. (a & b) & a).  */
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
       || code == AND || code == IOR || code == XOR
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
	  || (flag_associative_math && FLOAT_MODE_P (mode))))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
	{
	  rtx other = XEXP (XEXP (x, 0), 0);
	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
	  rtx inner_op1 = XEXP (x, 1);
	  rtx inner;

	  /* If this is a commutative operation, make sure we pass the
	     constant operand, if any, as the second one.  */
	  if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
	    std::swap (inner_op0, inner_op1);
	  inner = simplify_binary_operation (code == MINUS ? PLUS
					     : code == DIV ? MULT
					     : code,
					     mode, inner_op0, inner_op1);

	  /* For commutative operations, try the other pair if that one
	     didn't simplify.  */
	  if (inner == 0 && COMMUTATIVE_ARITH_P (x))
	    {
	      other = XEXP (XEXP (x, 0), 1);
	      inner = simplify_binary_operation (code, mode,
						 XEXP (XEXP (x, 0), 0),
						 XEXP (x, 1));
	    }

	  if (inner)
	    return simplify_gen_binary (code, mode, other, inner);
	}
    }
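  /* Worked example (illustrative): (plus (plus X (const_int 3))
     (const_int 4)) reassociates and folds the constants, giving
     (plus X (const_int 7)); likewise (and (and A B) A) collapses to
     (and B A) because the repeated operand pairs up as (and A A) = A.  */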

  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
	 address-recognizing predicates are called later.  */
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
      break;
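      /* The MEM canonicalization above, illustrated (mine): an address such
	 as (plus (ashift (reg I) (const_int 2)) (reg B)) is rewritten as
	 (plus (mult (reg I) (const_int 4)) (reg B)), the form the target's
	 address predicates expect.  */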

    case SUBREG:
      if (op0_mode == VOIDmode)
	op0_mode = GET_MODE (SUBREG_REG (x));

      /* See if this can be moved to simplify_subreg.  */
      if (CONSTANT_P (SUBREG_REG (x))
	  && known_eq (subreg_lowpart_offset (mode, op0_mode), SUBREG_BYTE (x))
	     /* Don't call gen_lowpart if the inner mode
		is VOIDmode and we cannot simplify it, as SUBREG without
		inner mode is invalid.  */
	  && (GET_MODE (SUBREG_REG (x)) != VOIDmode
	      || gen_lowpart_common (mode, SUBREG_REG (x))))
	return gen_lowpart (mode, SUBREG_REG (x));

      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
	break;
      {
	rtx temp;
	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
				SUBREG_BYTE (x));
	if (temp)
	  return temp;

	/* If op is known to have all lower bits zero, the result is zero.  */
	scalar_int_mode int_mode, int_op0_mode;
	if (!in_dest
	    && is_a <scalar_int_mode> (mode, &int_mode)
	    && is_a <scalar_int_mode> (op0_mode, &int_op0_mode)
	    && (GET_MODE_PRECISION (int_mode)
		< GET_MODE_PRECISION (int_op0_mode))
	    && known_eq (subreg_lowpart_offset (int_mode, int_op0_mode),
			 SUBREG_BYTE (x))
	    && HWI_COMPUTABLE_MODE_P (int_op0_mode)
	    && ((nonzero_bits (SUBREG_REG (x), int_op0_mode)
		 & GET_MODE_MASK (int_mode)) == 0)
	    && !side_effects_p (SUBREG_REG (x)))
	  return CONST0_RTX (int_mode);
      }

      /* Don't change the mode of the MEM if that would change the meaning
	 of the address.  */
      if (MEM_P (SUBREG_REG (x))
	  && (MEM_VOLATILE_P (SUBREG_REG (x))
	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0),
					   MEM_ADDR_SPACE (SUBREG_REG (x)))))
	return gen_rtx_CLOBBER (mode, const0_rtx);

      /* Note that we cannot do any narrowing for non-constants since
	 we might have been counting on using the fact that some bits were
	 zero.  We now do this in the SET.  */

      break;

    case NEG:
      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
	 replaced by (lshiftrt X C).  This will convert
	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */

      if (GET_CODE (temp) == ASHIFTRT
	  && CONST_INT_P (XEXP (temp, 1))
	  && INTVAL (XEXP (temp, 1)) == GET_MODE_UNIT_PRECISION (mode) - 1)
	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
				     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
	 or a SUBREG of one since we'd be making the expression more
	 complex if it was just a register.  */

      if (!REG_P (temp)
	  && ! (GET_CODE (temp) == SUBREG
		&& REG_P (SUBREG_REG (temp)))
	  && is_a <scalar_int_mode> (mode, &int_mode)
	  && (i = exact_log2 (nonzero_bits (temp, int_mode))) >= 0)
	{
	  rtx temp1 = simplify_shift_const
	    (NULL_RTX, ASHIFTRT, int_mode,
	     simplify_shift_const (NULL_RTX, ASHIFT, int_mode, temp,
				   GET_MODE_PRECISION (int_mode) - 1 - i),
	     GET_MODE_PRECISION (int_mode) - 1 - i);

	  /* If all we did was surround TEMP with the two shifts, we
	     haven't improved anything, so don't use it.  Otherwise,
	     we are better off with TEMP1.  */
	  if (GET_CODE (temp1) != ASHIFTRT
	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
	      || XEXP (XEXP (temp1, 0), 0) != temp)
	    return temp1;
	}
      break;
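      /* The NEG transforms above, illustrated (mine): in SImode, where an
	 ASHIFTRT by 31 yields 0 or -1, (neg (ashiftrt X 31)) is 0 or 1,
	 i.e. (lshiftrt X 31).  Conversely, if only bit 3 of X can be
	 nonzero, X is 0 or 8, so (neg X) is 0 or -8, which is exactly
	 (ashiftrt (ashift X 28) 28).  */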

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
	 because we don't know the real bitsize of the partial
	 integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	break;

      if (HWI_COMPUTABLE_MODE_P (mode))
	SUBST (XEXP (x, 0),
	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
			      GET_MODE_MASK (mode), 0));

      /* We can truncate a constant value and return it.  */
      if (CONST_INT_P (XEXP (x, 0)))
	return gen_int_mode (INTVAL (XEXP (x, 0)), mode);

      /* Similarly to what we do in simplify-rtx.c, a truncate of a register
	 whose value is a comparison can be replaced with a subreg if
	 STORE_FLAG_VALUE permits.  */
      if (HWI_COMPUTABLE_MODE_P (mode)
	  && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
	  && (temp = get_last_value (XEXP (x, 0)))
	  && COMPARISON_P (temp))
	return gen_lowpart (mode, XEXP (x, 0));
      break;

    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
	 returning the inner CONST since CONST can be shared with a
	 REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
	 can add in an offset.  find_split_point will split this address up
	 again if it doesn't match.  */
      if (HAVE_lo_sum && GET_CODE (XEXP (x, 0)) == HIGH
	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
	return XEXP (x, 1);
      break;

    case PLUS:
      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
	 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
	 bit-field and can be replaced by either a sign_extend or a
	 sign_extract.  The `and' may be a zero_extend and the two
	 <c>, -<c> constants may be reversed.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
	  && is_a <scalar_int_mode> (mode, &int_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
	  && ((i = exact_log2 (UINTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
	      || (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0)
	  && HWI_COMPUTABLE_MODE_P (int_mode)
	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
	       && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
	       && (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
		   == (HOST_WIDE_INT_1U << (i + 1)) - 1))
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
		  && known_eq ((GET_MODE_PRECISION
				(GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))),
			       (unsigned int) i + 1))))
	return simplify_shift_const
	  (NULL_RTX, ASHIFTRT, int_mode,
	   simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
				 XEXP (XEXP (XEXP (x, 0), 0), 0),
				 GET_MODE_PRECISION (int_mode) - (i + 1)),
	   GET_MODE_PRECISION (int_mode) - (i + 1));
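      /* Worked example (illustrative): with a 4-bit field in SImode,
	 (plus (xor (and X 15) 8) -8) computes ((X & 15) ^ 8) - 8, the
	 classic sign-extension idiom for the low 4 bits; it becomes
	 (ashiftrt (ashift X 28) 28).  */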

      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
	 the bitsize of the mode - 1.  This allows simplification of
	 "a = (b & 8) == 0;"  */
      if (XEXP (x, 1) == constm1_rtx
	  && !REG_P (XEXP (x, 0))
	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		&& REG_P (SUBREG_REG (XEXP (x, 0))))
	  && is_a <scalar_int_mode> (mode, &int_mode)
	  && nonzero_bits (XEXP (x, 0), int_mode) == 1)
	return simplify_shift_const
	  (NULL_RTX, ASHIFTRT, int_mode,
	   simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
				 gen_rtx_XOR (int_mode, XEXP (x, 0),
					      const1_rtx),
				 GET_MODE_PRECISION (int_mode) - 1),
	   GET_MODE_PRECISION (int_mode) - 1);
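      /* Worked example (illustrative): if x is 0 or 1, x - 1 is -1 or 0,
	 and (x ^ 1) is 1 or 0; shifting that bit to the sign position and
	 back arithmetically yields exactly -1 or 0, i.e. in SImode
	 (ashiftrt (ashift (xor x 1) 31) 31).  */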

      /* If we are adding two things that have no bits in common, convert
	 the addition into an IOR.  This will often be further simplified,
	 for example in cases like ((a & 1) + (a & 2)), which can
	 become a & 3.  */

      if (HWI_COMPUTABLE_MODE_P (mode)
	  && (nonzero_bits (XEXP (x, 0), mode)
	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
	{
	  /* Try to simplify the expression further.  */
	  rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
	  temp = combine_simplify_rtx (tor, VOIDmode, in_dest, 0);

	  /* If we could, great.  If not, do not go ahead with the IOR
	     replacement, since PLUS appears in many special purpose
	     address arithmetic instructions.  */
	  if (GET_CODE (temp) != CLOBBER
	      && (GET_CODE (temp) != IOR
		  || ((XEXP (temp, 0) != XEXP (x, 0)
		       || XEXP (temp, 1) != XEXP (x, 1))
		      && (XEXP (temp, 0) != XEXP (x, 1)
			  || XEXP (temp, 1) != XEXP (x, 0)))))
	    return temp;
	}

      /* Canonicalize x + x into x << 1.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && rtx_equal_p (XEXP (x, 0), XEXP (x, 1))
	  && !side_effects_p (XEXP (x, 0)))
	return simplify_gen_binary (ASHIFT, mode, XEXP (x, 0), const1_rtx);

      break;

    case MINUS:
      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
	 (and <foo> (const_int pow2-1))  */
      if (is_a <scalar_int_mode> (mode, &int_mode)
	  && GET_CODE (XEXP (x, 1)) == AND
	  && CONST_INT_P (XEXP (XEXP (x, 1), 1))
	  && pow2p_hwi (-UINTVAL (XEXP (XEXP (x, 1), 1)))
	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
	return simplify_and_const_int (NULL_RTX, int_mode, XEXP (x, 0),
				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
      break;
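      /* The MINUS folding above, worked through (illustrative): with
	 pow2 = 8, X - (X & -8) strips the rounded-down multiple of 8 from
	 X, leaving only the low bits, so it folds to (and X 7).  */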

    case MULT:
      /* If we have (mult (plus A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  This
	 occurs mostly in addresses, often when unrolling loops.  */

      if (GET_CODE (XEXP (x, 0)) == PLUS)
	{
	  rtx result = distribute_and_simplify_rtx (x, 0);
	  if (result)
	    return result;
	}

      /* Try to simplify a*(b/c) as (a*b)/c.  */
      if (FLOAT_MODE_P (mode) && flag_associative_math
	  && GET_CODE (XEXP (x, 0)) == DIV)
	{
	  rtx tem = simplify_binary_operation (MULT, mode,
					       XEXP (XEXP (x, 0), 0),
					       XEXP (x, 1));
	  if (tem)
	    return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
	}
      break;

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
	 its first operand is a shift.  */
      if (is_a <scalar_int_mode> (mode, &int_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
	      || GET_CODE (XEXP (x, 0)) == ROTATE
	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
	return simplify_shift_const (NULL_RTX, LSHIFTRT, int_mode,
				     XEXP (x, 0), i);
      break;
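      /* The UDIV case above, illustrated (mine): (udiv (ashift X 3)
	 (const_int 16)) is treated as (lshiftrt (ashift X 3) 4), which the
	 shift simplifier can then merge into a single shift and mask.  */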

    case EQ:  case NE:
    case GT:  case GTU:  case GE:  case GEU:
    case LT:  case LTU:  case LE:  case LEU:
    case UNEQ:  case LTGT:
    case UNGT:  case UNGE:
    case UNLT:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If the first operand is a condition code, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 0)) == COMPARE
	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
	      && ! CC0_P (XEXP (x, 0))))
	{
	  rtx op0 = XEXP (x, 0);
	  rtx op1 = XEXP (x, 1);
	  enum rtx_code new_code;

	  if (GET_CODE (op0) == COMPARE)
	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

	  /* Simplify our comparison, if possible.  */
	  new_code = simplify_comparison (code, &op0, &op1);

	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
	     if only the low-order bit is possibly nonzero in X (such as when
	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
	     (plus X 1).

	     Remove any ZERO_EXTRACT we made when thinking this was a
	     comparison.  It may now be simpler to use, e.g., an AND.  If a
	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
	     the call to make_compound_operation in the SET case.

	     Don't apply these optimizations if the caller would
	     prefer a comparison rather than a value.
	     E.g., for the condition in an IF_THEN_ELSE most targets need
	     an explicit comparison.  */

	  if (in_cond)
	    ;

	  else if (STORE_FLAG_VALUE == 1
		   && new_code == NE
		   && is_int_mode (mode, &int_mode)
		   && op1 == const0_rtx
		   && int_mode == GET_MODE (op0)
		   && nonzero_bits (op0, int_mode) == 1)
	    return gen_lowpart (int_mode,
				expand_compound_operation (op0));

	  else if (STORE_FLAG_VALUE == 1
		   && new_code == NE
		   && is_int_mode (mode, &int_mode)
		   && op1 == const0_rtx
		   && int_mode == GET_MODE (op0)
		   && (num_sign_bit_copies (op0, int_mode)
		       == GET_MODE_PRECISION (int_mode)))
	    {
	      op0 = expand_compound_operation (op0);
	      return simplify_gen_unary (NEG, int_mode,
					 gen_lowpart (int_mode, op0),
					 int_mode);
	    }

	  else if (STORE_FLAG_VALUE == 1
		   && new_code == EQ
		   && is_int_mode (mode, &int_mode)
		   && op1 == const0_rtx
		   && int_mode == GET_MODE (op0)
		   && nonzero_bits (op0, int_mode) == 1)
	    {
	      op0 = expand_compound_operation (op0);
	      return simplify_gen_binary (XOR, int_mode,
					  gen_lowpart (int_mode, op0),
					  const1_rtx);
	    }

	  else if (STORE_FLAG_VALUE == 1
		   && new_code == EQ
		   && is_int_mode (mode, &int_mode)
		   && op1 == const0_rtx
		   && int_mode == GET_MODE (op0)
		   && (num_sign_bit_copies (op0, int_mode)
		       == GET_MODE_PRECISION (int_mode)))
	    {
	      op0 = expand_compound_operation (op0);
	      return plus_constant (int_mode, gen_lowpart (int_mode, op0), 1);
	    }

	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
	     those above.  */
	  if (in_cond)
	    ;

	  else if (STORE_FLAG_VALUE == -1
		   && new_code == NE
		   && is_int_mode (mode, &int_mode)
		   && op1 == const0_rtx
		   && int_mode == GET_MODE (op0)
		   && (num_sign_bit_copies (op0, int_mode)
		       == GET_MODE_PRECISION (int_mode)))
	    return gen_lowpart (int_mode, expand_compound_operation (op0));

	  else if (STORE_FLAG_VALUE == -1
		   && new_code == NE
		   && is_int_mode (mode, &int_mode)
		   && op1 == const0_rtx
		   && int_mode == GET_MODE (op0)
		   && nonzero_bits (op0, int_mode) == 1)
	    {
	      op0 = expand_compound_operation (op0);
	      return simplify_gen_unary (NEG, int_mode,
					 gen_lowpart (int_mode, op0),
					 int_mode);
	    }

	  else if (STORE_FLAG_VALUE == -1
		   && new_code == EQ
		   && is_int_mode (mode, &int_mode)
		   && op1 == const0_rtx
		   && int_mode == GET_MODE (op0)
		   && (num_sign_bit_copies (op0, int_mode)
		       == GET_MODE_PRECISION (int_mode)))
	    {
	      op0 = expand_compound_operation (op0);
	      return simplify_gen_unary (NOT, int_mode,
					 gen_lowpart (int_mode, op0),
					 int_mode);
	    }

	  /* If X is 0/1, (eq X 0) is X-1.  */
	  else if (STORE_FLAG_VALUE == -1
		   && new_code == EQ
		   && is_int_mode (mode, &int_mode)
		   && op1 == const0_rtx
		   && int_mode == GET_MODE (op0)
		   && nonzero_bits (op0, int_mode) == 1)
	    {
	      op0 = expand_compound_operation (op0);
	      return plus_constant (int_mode, gen_lowpart (int_mode, op0), -1);
	    }

	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
	     one bit that might be nonzero, we can convert (ne x 0) to
	     (ashift x c) where C puts the bit in the sign bit.  Remove any
	     AND with STORE_FLAG_VALUE when we are done, since we are only
	     going to test the sign bit.  */
	  if (new_code == NE
	      && is_int_mode (mode, &int_mode)
	      && HWI_COMPUTABLE_MODE_P (int_mode)
	      && val_signbit_p (int_mode, STORE_FLAG_VALUE)
	      && op1 == const0_rtx
	      && int_mode == GET_MODE (op0)
	      && (i = exact_log2 (nonzero_bits (op0, int_mode))) >= 0)
	    {
	      x = simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
					expand_compound_operation (op0),
					GET_MODE_PRECISION (int_mode) - 1 - i);
	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
		return XEXP (x, 0);
	      else
		return x;
	    }

	  /* If the code changed, return a whole new comparison.
	     We also need to avoid using SUBST in cases where
	     simplify_comparison has widened a comparison with a CONST_INT,
	     since in that case the wider CONST_INT may fail the sanity
	     checks in do_SUBST.  */
	  if (new_code != code
	      || (CONST_INT_P (op1)
		  && GET_MODE (op0) != GET_MODE (XEXP (x, 0))
		  && GET_MODE (op0) != GET_MODE (XEXP (x, 1))))
	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);

	  /* Otherwise, keep this operation, but maybe change its operands.
	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
	  SUBST (XEXP (x, 0), op0);
	  SUBST (XEXP (x, 1), op1);
	}
      break;
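      /* The store-flag conversions above, summarized with a worked example
	 (illustrative): with STORE_FLAG_VALUE == 1 and X known to be 0 or
	 1, (ne X 0) is just X and (eq X 0) is (xor X 1); with
	 STORE_FLAG_VALUE == -1 and X known to be 0 or -1, (ne X 0) is X
	 itself and (eq X 0) is (not X).  */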

    case IF_THEN_ELSE:
      return simplify_if_then_else (x);

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* If we are processing SET_DEST, we are done.  */
      if (in_dest)
	return x;

      return expand_compound_operation (x);

    case SET:
      return simplify_set (x);

    case AND:
    case IOR:
      return simplify_logical (x);

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* If this is a shift by a constant amount, simplify it.  */
      if (CONST_INT_P (XEXP (x, 1)))
	return simplify_shift_const (x, code, mode, XEXP (x, 0),
				     INTVAL (XEXP (x, 1)));

      else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
	SUBST (XEXP (x, 1),
	       force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
			      (HOST_WIDE_INT_1U
			       << exact_log2 (GET_MODE_UNIT_BITSIZE
					      (GET_MODE (x))))
			      - 1,
			      0));
      break;

    default:
      break;
    }

  return x;
}

/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */

static rtx
simplify_if_then_else (rtx x)
{
  machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true_rtx = XEXP (x, 1);
  rtx false_rtx = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = COMPARISON_P (cond);
  rtx temp;
  int i;
  enum rtx_code false_code;
  rtx reversed;
  scalar_int_mode int_mode, inner_mode;

  /* Simplify storing of the truth value.  */
  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
    return simplify_gen_relational (true_code, mode, VOIDmode,
				    XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p
      && true_rtx == const0_rtx && false_rtx == const_true_rtx
      && (reversed = reversed_comparison (cond, mode)))
    return reversed;

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p
      && ((false_code = reversed_comparison_code (cond, NULL))
	  != UNKNOWN)
      && REG_P (XEXP (cond, 0)))
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms.  */

      if (false_code == EQ)
	{
	  swapped = 1, true_code = EQ, false_code = NE;
	  std::swap (true_rtx, false_rtx);
	}

      scalar_int_mode from_mode;
      if (is_a <scalar_int_mode> (GET_MODE (from), &from_mode))
	{
	  /* If we are comparing against zero and the expression being
	     tested has only a single bit that might be nonzero, that is
	     its value when it is not equal to zero.  Similarly if it is
	     known to be -1 or 0.  */
	  if (true_code == EQ
	      && true_val == const0_rtx
	      && pow2p_hwi (nzb = nonzero_bits (from, from_mode)))
	    {
	      false_code = EQ;
	      false_val = gen_int_mode (nzb, from_mode);
	    }
	  else if (true_code == EQ
		   && true_val == const0_rtx
		   && (num_sign_bit_copies (from, from_mode)
		       == GET_MODE_PRECISION (from_mode)))
	    {
	      false_code = EQ;
	      false_val = constm1_rtx;
	    }
	}

      /* Now simplify an arm if we know the value of the register in the
	 branch and it is used in the arm.  Be careful due to the potential
	 of locally-shared RTL.  */

      if (reg_mentioned_p (from, true_rtx))
	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
				      from, true_val),
			  pc_rtx, pc_rtx, 0, 0, 0);
      if (reg_mentioned_p (from, false_rtx))
	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
				       from, false_val),
			   pc_rtx, pc_rtx, 0, 0, 0);

      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);

      true_rtx = XEXP (x, 1);
      false_rtx = XEXP (x, 2);
      true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p
      && reversed_comparison_code (cond, NULL) != UNKNOWN
      && (true_rtx == pc_rtx
	  || (CONSTANT_P (true_rtx)
	      && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
	  || true_rtx == const0_rtx
	  || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
	  || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
	      && !OBJECT_P (false_rtx))
	  || reg_mentioned_p (true_rtx, false_rtx)
	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
    {
      true_code = reversed_comparison_code (cond, NULL);
      SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
      SUBST (XEXP (x, 1), false_rtx);
      SUBST (XEXP (x, 2), true_rtx);

      std::swap (true_rtx, false_rtx);
      cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = COMPARISON_P (cond);
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
    return true_rtx;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && !HONOR_NANS (mode)
      && rtx_equal_p (XEXP (cond, 0), false_rtx)
      && rtx_equal_p (XEXP (cond, 1), true_rtx))
    return false_rtx;
  else if (true_code == NE && ! side_effects_p (cond)
	   && !HONOR_NANS (mode)
	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
    return true_rtx;

  /* Look for cases where we have (abs x) or (neg (abs X)).  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && comparison_p
      && XEXP (cond, 1) == const0_rtx
      && GET_CODE (false_rtx) == NEG
      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
      && rtx_equal_p (true_rtx, XEXP (cond, 0))
      && ! side_effects_p (true_rtx))
    switch (true_code)
      {
      case GT:
      case GE:
	return simplify_gen_unary (ABS, mode, true_rtx, mode);
      case LT:
      case LE:
	return
	  simplify_gen_unary (NEG, mode,
			      simplify_gen_unary (ABS, mode, true_rtx, mode),
			      mode);
      default:
	break;
      }
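  /* Worked example (illustrative): (if_then_else (ge X 0) X (neg X)) is
     recognized here as (abs X), and (if_then_else (lt X 0) X (neg X)) as
     (neg (abs X)).  */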

  /* Look for MIN or MAX.  */

  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true_rtx)
      && rtx_equal_p (XEXP (cond, 1), false_rtx)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
	return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
      case LE:
      case LT:
	return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
      case GEU:
      case GTU:
	return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
      case LEU:
      case LTU:
	return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
      default:
	break;
      }
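  /* Illustrative example: (if_then_else (gt A B) A B) becomes (smax A B),
     and (if_then_else (ltu A B) A B) becomes (umin A B); for float modes
     this is only done under -funsafe-math-optimizations because of NaN and
     signed-zero semantics.  */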

  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p
      && is_int_mode (mode, &int_mode)
      && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true_rtx, SET);
      rtx f = make_compound_operation (false_rtx, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
      scalar_int_mode m = int_mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
	   || GET_CODE (t) == ASHIFT
	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
	  && rtx_equal_p (XEXP (t, 0), f))
	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
	 would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
		|| GET_CODE (t) == XOR)
	       && rtx_equal_p (XEXP (t, 1), f))
	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
	       && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (unsigned int)
		     (GET_MODE_PRECISION (int_mode)
		      - GET_MODE_PRECISION (inner_mode))))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = inner_mode;
	}
      else if (GET_CODE (t) == SIGN_EXTEND
	       && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (unsigned int)
		     (GET_MODE_PRECISION (int_mode)
		      - GET_MODE_PRECISION (inner_mode))))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = inner_mode;
	}
      else if (GET_CODE (t) == ZERO_EXTEND
	       && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && HWI_COMPUTABLE_MODE_P (int_mode)
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~GET_MODE_MASK (inner_mode))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = inner_mode;
	}
      else if (GET_CODE (t) == ZERO_EXTEND
	       && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && HWI_COMPUTABLE_MODE_P (int_mode)
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~GET_MODE_MASK (inner_mode))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = inner_mode;
	}

      if (z)
	{
	  machine_mode cm = m;
	  if ((op == ASHIFT || op == LSHIFTRT || op == ASHIFTRT)
	      && GET_MODE (c1) != VOIDmode)
	    cm = GET_MODE (c1);
	  temp = subst (simplify_gen_relational (true_code, cm, VOIDmode,
						 cond_op0, cond_op1),
			pc_rtx, pc_rtx, 0, 0, 0);
	  temp = simplify_gen_binary (MULT, cm, temp,
				      simplify_gen_binary (MULT, cm, c1,
							   const_true_rtx));
	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0, 0);
	  temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);

	  if (extend_op != UNKNOWN)
	    temp = simplify_gen_unary (extend_op, int_mode, temp, m);

	  return temp;
	}
    }
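  /* Worked example (illustrative): with STORE_FLAG_VALUE == 1,
     (if_then_else COND (plus Z (const_int 4)) Z) can be rebuilt as
     (plus Z (mult COND' (const_int 4))), since the multiply contributes 4
     when the store-flag value is 1 and the identity value 0 otherwise.  */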

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */

  if (true_code == NE
      && is_a <scalar_int_mode> (mode, &int_mode)
      && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx
      && CONST_INT_P (true_rtx)
      && ((nonzero_bits (XEXP (cond, 0), int_mode) == 1
	   && (i = exact_log2 (UINTVAL (true_rtx))) >= 0)
	  || ((num_sign_bit_copies (XEXP (cond, 0), int_mode)
	       == GET_MODE_PRECISION (int_mode))
	      && (i = exact_log2 (-UINTVAL (true_rtx))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
			    gen_lowpart (int_mode, XEXP (cond, 0)), i);
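  /* Worked example (illustrative): if A is known to be 0 or 1, then
     (if_then_else (ne A 0) (const_int 8) (const_int 0)) is simply
     (ashift A 3); if A is known to be 0 or -1, the same shift handles
     (const_int -8), since (-1) << 3 == -8.  */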
672838fd1498Szrj 
672938fd1498Szrj   /* (IF_THEN_ELSE (NE A 0) C1 0) is A or a zero-extend of A if the only
673038fd1498Szrj      non-zero bit in A is C1.  */
673138fd1498Szrj   if (true_code == NE && XEXP (cond, 1) == const0_rtx
673238fd1498Szrj       && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
673338fd1498Szrj       && is_a <scalar_int_mode> (mode, &int_mode)
673438fd1498Szrj       && is_a <scalar_int_mode> (GET_MODE (XEXP (cond, 0)), &inner_mode)
673538fd1498Szrj       && (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))
673638fd1498Szrj 	  == nonzero_bits (XEXP (cond, 0), inner_mode)
673738fd1498Szrj       && (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))) >= 0)
673838fd1498Szrj     {
673938fd1498Szrj       rtx val = XEXP (cond, 0);
674038fd1498Szrj       if (inner_mode == int_mode)
674138fd1498Szrj         return val;
674238fd1498Szrj       else if (GET_MODE_PRECISION (inner_mode) < GET_MODE_PRECISION (int_mode))
674338fd1498Szrj         return simplify_gen_unary (ZERO_EXTEND, int_mode, val, inner_mode);
674438fd1498Szrj     }
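  /* Sketch of the case above: if nonzero_bits proves A can only be 0 or
     8, then

	 (if_then_else (ne A (const_int 0)) (const_int 8) (const_int 0))

     is simply A itself, zero-extended if A is narrower than the result.  */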
674538fd1498Szrj 
674638fd1498Szrj   return x;
674738fd1498Szrj }
674838fd1498Szrj 
674938fd1498Szrj /* Simplify X, a SET expression.  Return the new expression.  */
675038fd1498Szrj 
675138fd1498Szrj static rtx
675238fd1498Szrj simplify_set (rtx x)
675338fd1498Szrj {
675438fd1498Szrj   rtx src = SET_SRC (x);
675538fd1498Szrj   rtx dest = SET_DEST (x);
675638fd1498Szrj   machine_mode mode
675738fd1498Szrj     = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
675838fd1498Szrj   rtx_insn *other_insn;
675938fd1498Szrj   rtx *cc_use;
676038fd1498Szrj   scalar_int_mode int_mode;
676138fd1498Szrj 
676238fd1498Szrj   /* (set (pc) (return)) gets written as (return).  */
676338fd1498Szrj   if (GET_CODE (dest) == PC && ANY_RETURN_P (src))
676438fd1498Szrj     return src;
676538fd1498Szrj 
676638fd1498Szrj   /* Now that we know for sure which bits of SRC we are using, see if we can
676738fd1498Szrj      simplify the expression for the object knowing that we only need the
676838fd1498Szrj      low-order bits.  */
676938fd1498Szrj 
677038fd1498Szrj   if (GET_MODE_CLASS (mode) == MODE_INT && HWI_COMPUTABLE_MODE_P (mode))
677138fd1498Szrj     {
677238fd1498Szrj       src = force_to_mode (src, mode, HOST_WIDE_INT_M1U, 0);
677338fd1498Szrj       SUBST (SET_SRC (x), src);
677438fd1498Szrj     }
677538fd1498Szrj 
677638fd1498Szrj   /* If we are setting CC0 or if the source is a COMPARE, look for the use of
677738fd1498Szrj      the comparison result and try to simplify it unless we already have used
677838fd1498Szrj      undobuf.other_insn.  */
677938fd1498Szrj   if ((GET_MODE_CLASS (mode) == MODE_CC
678038fd1498Szrj        || GET_CODE (src) == COMPARE
678138fd1498Szrj        || CC0_P (dest))
678238fd1498Szrj       && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
678338fd1498Szrj       && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
678438fd1498Szrj       && COMPARISON_P (*cc_use)
678538fd1498Szrj       && rtx_equal_p (XEXP (*cc_use, 0), dest))
678638fd1498Szrj     {
678738fd1498Szrj       enum rtx_code old_code = GET_CODE (*cc_use);
678838fd1498Szrj       enum rtx_code new_code;
678938fd1498Szrj       rtx op0, op1, tmp;
679038fd1498Szrj       int other_changed = 0;
679138fd1498Szrj       rtx inner_compare = NULL_RTX;
679238fd1498Szrj       machine_mode compare_mode = GET_MODE (dest);
679338fd1498Szrj 
679438fd1498Szrj       if (GET_CODE (src) == COMPARE)
679538fd1498Szrj 	{
679638fd1498Szrj 	  op0 = XEXP (src, 0), op1 = XEXP (src, 1);
679738fd1498Szrj 	  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
679838fd1498Szrj 	    {
679938fd1498Szrj 	      inner_compare = op0;
680038fd1498Szrj 	      op0 = XEXP (inner_compare, 0), op1 = XEXP (inner_compare, 1);
680138fd1498Szrj 	    }
680238fd1498Szrj 	}
680338fd1498Szrj       else
680438fd1498Szrj 	op0 = src, op1 = CONST0_RTX (GET_MODE (src));
680538fd1498Szrj 
680638fd1498Szrj       tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
680738fd1498Szrj 					   op0, op1);
680838fd1498Szrj       if (!tmp)
680938fd1498Szrj 	new_code = old_code;
681038fd1498Szrj       else if (!CONSTANT_P (tmp))
681138fd1498Szrj 	{
681238fd1498Szrj 	  new_code = GET_CODE (tmp);
681338fd1498Szrj 	  op0 = XEXP (tmp, 0);
681438fd1498Szrj 	  op1 = XEXP (tmp, 1);
681538fd1498Szrj 	}
681638fd1498Szrj       else
681738fd1498Szrj 	{
681838fd1498Szrj 	  rtx pat = PATTERN (other_insn);
681938fd1498Szrj 	  undobuf.other_insn = other_insn;
682038fd1498Szrj 	  SUBST (*cc_use, tmp);
682138fd1498Szrj 
682238fd1498Szrj 	  /* Attempt to simplify CC user.  */
682338fd1498Szrj 	  if (GET_CODE (pat) == SET)
682438fd1498Szrj 	    {
682538fd1498Szrj 	      rtx new_rtx = simplify_rtx (SET_SRC (pat));
682638fd1498Szrj 	      if (new_rtx != NULL_RTX)
682738fd1498Szrj 		SUBST (SET_SRC (pat), new_rtx);
682838fd1498Szrj 	    }
682938fd1498Szrj 
683038fd1498Szrj 	  /* Convert X into a no-op move.  */
683138fd1498Szrj 	  SUBST (SET_DEST (x), pc_rtx);
683238fd1498Szrj 	  SUBST (SET_SRC (x), pc_rtx);
683338fd1498Szrj 	  return x;
683438fd1498Szrj 	}
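      /* A sketch of the constant case just handled: if the comparison
	 folds, e.g. (eq R R) is const_true_rtx, the CC user is patched
	 to use the constant and this SET becomes the canonical no-op
	 move (set (pc) (pc)), which a later pass deletes.  */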
683538fd1498Szrj 
683638fd1498Szrj       /* Simplify our comparison, if possible.  */
683738fd1498Szrj       new_code = simplify_comparison (new_code, &op0, &op1);
683838fd1498Szrj 
683938fd1498Szrj #ifdef SELECT_CC_MODE
684038fd1498Szrj       /* If this machine has CC modes other than CCmode, check to see if we
684138fd1498Szrj 	 need to use a different CC mode here.  */
684238fd1498Szrj       if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
684338fd1498Szrj 	compare_mode = GET_MODE (op0);
684438fd1498Szrj       else if (inner_compare
684538fd1498Szrj 	       && GET_MODE_CLASS (GET_MODE (inner_compare)) == MODE_CC
684638fd1498Szrj 	       && new_code == old_code
684738fd1498Szrj 	       && op0 == XEXP (inner_compare, 0)
684838fd1498Szrj 	       && op1 == XEXP (inner_compare, 1))
684938fd1498Szrj 	compare_mode = GET_MODE (inner_compare);
685038fd1498Szrj       else
685138fd1498Szrj 	compare_mode = SELECT_CC_MODE (new_code, op0, op1);
685238fd1498Szrj 
685338fd1498Szrj       /* If the mode changed, we have to change SET_DEST, the mode in the
685438fd1498Szrj 	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
685538fd1498Szrj 	 a hard register, just build new versions with the proper mode.  If it
685638fd1498Szrj 	 is a pseudo, we lose unless it is the only time we set the pseudo, in
685738fd1498Szrj 	 which case we can safely change its mode.  */
685838fd1498Szrj       if (!HAVE_cc0 && compare_mode != GET_MODE (dest))
685938fd1498Szrj 	{
686038fd1498Szrj 	  if (can_change_dest_mode (dest, 0, compare_mode))
686138fd1498Szrj 	    {
686238fd1498Szrj 	      unsigned int regno = REGNO (dest);
686338fd1498Szrj 	      rtx new_dest;
686438fd1498Szrj 
686538fd1498Szrj 	      if (regno < FIRST_PSEUDO_REGISTER)
686638fd1498Szrj 		new_dest = gen_rtx_REG (compare_mode, regno);
686738fd1498Szrj 	      else
686838fd1498Szrj 		{
686938fd1498Szrj 		  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
687038fd1498Szrj 		  new_dest = regno_reg_rtx[regno];
687138fd1498Szrj 		}
687238fd1498Szrj 
687338fd1498Szrj 	      SUBST (SET_DEST (x), new_dest);
687438fd1498Szrj 	      SUBST (XEXP (*cc_use, 0), new_dest);
687538fd1498Szrj 	      other_changed = 1;
687638fd1498Szrj 
687738fd1498Szrj 	      dest = new_dest;
687838fd1498Szrj 	    }
687938fd1498Szrj 	}
688038fd1498Szrj #endif  /* SELECT_CC_MODE */
688138fd1498Szrj 
688238fd1498Szrj       /* If the code changed, we have to build a new comparison in
688338fd1498Szrj 	 undobuf.other_insn.  */
688438fd1498Szrj       if (new_code != old_code)
688538fd1498Szrj 	{
688638fd1498Szrj 	  int other_changed_previously = other_changed;
688738fd1498Szrj 	  unsigned HOST_WIDE_INT mask;
688838fd1498Szrj 	  rtx old_cc_use = *cc_use;
688938fd1498Szrj 
689038fd1498Szrj 	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
689138fd1498Szrj 					  dest, const0_rtx));
689238fd1498Szrj 	  other_changed = 1;
689338fd1498Szrj 
689438fd1498Szrj 	  /* If the only change we made was to change an EQ into an NE or
689538fd1498Szrj 	     vice versa, OP0 has only one bit that might be nonzero, and OP1
689638fd1498Szrj 	     is zero, check if changing the user of the condition code will
689738fd1498Szrj 	     produce a valid insn.  If it won't, we can keep the original code
689838fd1498Szrj 	     in that insn by surrounding our operation with an XOR.  */
689938fd1498Szrj 
690038fd1498Szrj 	  if (((old_code == NE && new_code == EQ)
690138fd1498Szrj 	       || (old_code == EQ && new_code == NE))
690238fd1498Szrj 	      && ! other_changed_previously && op1 == const0_rtx
690338fd1498Szrj 	      && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
690438fd1498Szrj 	      && pow2p_hwi (mask = nonzero_bits (op0, GET_MODE (op0))))
690538fd1498Szrj 	    {
690638fd1498Szrj 	      rtx pat = PATTERN (other_insn), note = 0;
690738fd1498Szrj 
690838fd1498Szrj 	      if ((recog_for_combine (&pat, other_insn, &note) < 0
690938fd1498Szrj 		   && ! check_asm_operands (pat)))
691038fd1498Szrj 		{
691138fd1498Szrj 		  *cc_use = old_cc_use;
691238fd1498Szrj 		  other_changed = 0;
691338fd1498Szrj 
691438fd1498Szrj 		  op0 = simplify_gen_binary (XOR, GET_MODE (op0), op0,
691538fd1498Szrj 					     gen_int_mode (mask,
691638fd1498Szrj 							   GET_MODE (op0)));
691738fd1498Szrj 		}
691838fd1498Szrj 	    }
691938fd1498Szrj 	}
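      /* A hedged example of the XOR fallback above: suppose (ne CC 0)
	 simplified to (eq OP0 0) but the rewritten user insn does not
	 match any pattern.  With nonzero_bits (OP0) == 4, OP0 is either
	 0 or 4, so testing

	     (ne (xor OP0 (const_int 4)) (const_int 0))

	 computes the desired EQ: the XOR is 4 (nonzero) when OP0 is 0
	 and 0 when OP0 is 4.  */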
692038fd1498Szrj 
692138fd1498Szrj       if (other_changed)
692238fd1498Szrj 	undobuf.other_insn = other_insn;
692338fd1498Szrj 
692438fd1498Szrj       /* Don't generate a compare of a CC with 0, just use that CC.  */
692538fd1498Szrj       if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
692638fd1498Szrj 	{
692738fd1498Szrj 	  SUBST (SET_SRC (x), op0);
692838fd1498Szrj 	  src = SET_SRC (x);
692938fd1498Szrj 	}
693038fd1498Szrj       /* Otherwise, if we didn't previously have the same COMPARE we
693138fd1498Szrj 	 want, create it from scratch.  */
693238fd1498Szrj       else if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode
693338fd1498Szrj 	       || XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
693438fd1498Szrj 	{
693538fd1498Szrj 	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
693638fd1498Szrj 	  src = SET_SRC (x);
693738fd1498Szrj 	}
693838fd1498Szrj     }
693938fd1498Szrj   else
694038fd1498Szrj     {
694138fd1498Szrj       /* Get SET_SRC in a form where we have placed back any
694238fd1498Szrj 	 compound expressions.  Then do the checks below.  */
694338fd1498Szrj       src = make_compound_operation (src, SET);
694438fd1498Szrj       SUBST (SET_SRC (x), src);
694538fd1498Szrj     }
694638fd1498Szrj 
694738fd1498Szrj   /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
694838fd1498Szrj      and X being a REG or (subreg (reg)), we may be able to convert this to
694938fd1498Szrj      (set (subreg:m2 x) (op)).
695038fd1498Szrj 
695138fd1498Szrj      We can always do this if M1 is narrower than M2 because that means that
695238fd1498Szrj      we only care about the low bits of the result.
695338fd1498Szrj 
695438fd1498Szrj      However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
695538fd1498Szrj      perform a narrower operation than requested since the high-order bits will
695638fd1498Szrj      be undefined.  On machines where it is defined, this transformation is safe
695738fd1498Szrj      as long as M1 and M2 have the same number of words.  */
695838fd1498Szrj 
695938fd1498Szrj   if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
696038fd1498Szrj       && !OBJECT_P (SUBREG_REG (src))
696138fd1498Szrj       && (known_equal_after_align_up
696238fd1498Szrj 	  (GET_MODE_SIZE (GET_MODE (src)),
696338fd1498Szrj 	   GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))),
696438fd1498Szrj 	   UNITS_PER_WORD))
696538fd1498Szrj       && (WORD_REGISTER_OPERATIONS || !paradoxical_subreg_p (src))
696638fd1498Szrj       && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
696738fd1498Szrj 	    && !REG_CAN_CHANGE_MODE_P (REGNO (dest),
696838fd1498Szrj 				       GET_MODE (SUBREG_REG (src)),
696938fd1498Szrj 				       GET_MODE (src)))
697038fd1498Szrj       && (REG_P (dest)
697138fd1498Szrj 	  || (GET_CODE (dest) == SUBREG
697238fd1498Szrj 	      && REG_P (SUBREG_REG (dest)))))
697338fd1498Szrj     {
697438fd1498Szrj       SUBST (SET_DEST (x),
697538fd1498Szrj 	     gen_lowpart (GET_MODE (SUBREG_REG (src)),
697638fd1498Szrj 				      dest));
697738fd1498Szrj       SUBST (SET_SRC (x), SUBREG_REG (src));
697838fd1498Szrj 
697938fd1498Szrj       src = SET_SRC (x), dest = SET_DEST (x);
698038fd1498Szrj     }
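  /* A sketch of that rewrite on a 64-bit little-endian target
     (M1 == SImode, M2 == DImode):

	 (set R:SI (subreg:SI (plus:DI A B) 0))

     becomes

	 (set (subreg:DI R 0) (plus:DI A B))

     The PLUS is now performed in DImode; only the low part of the result
     is meaningful, which is all the original insn used.  */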
698138fd1498Szrj 
698238fd1498Szrj   /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
698338fd1498Szrj      in SRC.  */
698438fd1498Szrj   if (dest == cc0_rtx
698538fd1498Szrj       && partial_subreg_p (src)
698638fd1498Szrj       && subreg_lowpart_p (src))
698738fd1498Szrj     {
698838fd1498Szrj       rtx inner = SUBREG_REG (src);
698938fd1498Szrj       machine_mode inner_mode = GET_MODE (inner);
699038fd1498Szrj 
699138fd1498Szrj       /* Here we make sure that the sign bit is known to be clear.  */
699238fd1498Szrj       if (val_signbit_known_clear_p (GET_MODE (src),
699338fd1498Szrj 				     nonzero_bits (inner, inner_mode)))
699438fd1498Szrj 	{
699538fd1498Szrj 	  SUBST (SET_SRC (x), inner);
699638fd1498Szrj 	  src = SET_SRC (x);
699738fd1498Szrj 	}
699838fd1498Szrj     }
699938fd1498Szrj 
700038fd1498Szrj   /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
700138fd1498Szrj      would require a paradoxical subreg.  Replace the subreg with a
700238fd1498Szrj      zero_extend to avoid the reload that would otherwise be required.
700338fd1498Szrj      Don't do this unless we have a scalar integer mode, otherwise the
700438fd1498Szrj      transformation is incorrect.  */
700538fd1498Szrj 
700638fd1498Szrj   enum rtx_code extend_op;
700738fd1498Szrj   if (paradoxical_subreg_p (src)
700838fd1498Szrj       && MEM_P (SUBREG_REG (src))
700938fd1498Szrj       && SCALAR_INT_MODE_P (GET_MODE (src))
701038fd1498Szrj       && (extend_op = load_extend_op (GET_MODE (SUBREG_REG (src)))) != UNKNOWN)
701138fd1498Szrj     {
701238fd1498Szrj       SUBST (SET_SRC (x),
701338fd1498Szrj 	     gen_rtx_fmt_e (extend_op, GET_MODE (src), SUBREG_REG (src)));
701438fd1498Szrj 
701538fd1498Szrj       src = SET_SRC (x);
701638fd1498Szrj     }
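  /* Sketch, assuming a target for which load_extend_op (SImode) is
     ZERO_EXTEND:

	 (set FOO:DI (subreg:DI (mem:SI BAR) 0))

     becomes

	 (set FOO:DI (zero_extend:DI (mem:SI BAR)))  */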
701738fd1498Szrj 
701838fd1498Szrj   /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
701938fd1498Szrj      are comparing an item known to be 0 or -1 against 0, use a logical
702038fd1498Szrj      operation instead. Check for one of the arms being an IOR of the other
702138fd1498Szrj      arm with some value.  We compute three terms to be IOR'ed together.  In
702238fd1498Szrj      practice, at most two will be nonzero.  Then we do the IOR's.  */
702338fd1498Szrj 
702438fd1498Szrj   if (GET_CODE (dest) != PC
702538fd1498Szrj       && GET_CODE (src) == IF_THEN_ELSE
702638fd1498Szrj       && is_int_mode (GET_MODE (src), &int_mode)
702738fd1498Szrj       && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
702838fd1498Szrj       && XEXP (XEXP (src, 0), 1) == const0_rtx
702938fd1498Szrj       && int_mode == GET_MODE (XEXP (XEXP (src, 0), 0))
703038fd1498Szrj       && (!HAVE_conditional_move
703138fd1498Szrj 	  || ! can_conditionally_move_p (int_mode))
703238fd1498Szrj       && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), int_mode)
703338fd1498Szrj 	  == GET_MODE_PRECISION (int_mode))
703438fd1498Szrj       && ! side_effects_p (src))
703538fd1498Szrj     {
703638fd1498Szrj       rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
703738fd1498Szrj 		      ? XEXP (src, 1) : XEXP (src, 2));
703838fd1498Szrj       rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
703938fd1498Szrj 		   ? XEXP (src, 2) : XEXP (src, 1));
704038fd1498Szrj       rtx term1 = const0_rtx, term2, term3;
704138fd1498Szrj 
704238fd1498Szrj       if (GET_CODE (true_rtx) == IOR
704338fd1498Szrj 	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
704438fd1498Szrj 	term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
704538fd1498Szrj       else if (GET_CODE (true_rtx) == IOR
704638fd1498Szrj 	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
704738fd1498Szrj 	term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
704838fd1498Szrj       else if (GET_CODE (false_rtx) == IOR
704938fd1498Szrj 	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
705038fd1498Szrj 	term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
705138fd1498Szrj       else if (GET_CODE (false_rtx) == IOR
705238fd1498Szrj 	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
705338fd1498Szrj 	term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
705438fd1498Szrj 
705538fd1498Szrj       term2 = simplify_gen_binary (AND, int_mode,
705638fd1498Szrj 				   XEXP (XEXP (src, 0), 0), true_rtx);
705738fd1498Szrj       term3 = simplify_gen_binary (AND, int_mode,
705838fd1498Szrj 				   simplify_gen_unary (NOT, int_mode,
705938fd1498Szrj 						       XEXP (XEXP (src, 0), 0),
706038fd1498Szrj 						       int_mode),
706138fd1498Szrj 				   false_rtx);
706238fd1498Szrj 
706338fd1498Szrj       SUBST (SET_SRC (x),
706438fd1498Szrj 	     simplify_gen_binary (IOR, int_mode,
706538fd1498Szrj 				  simplify_gen_binary (IOR, int_mode,
706638fd1498Szrj 						       term1, term2),
706738fd1498Szrj 				  term3));
706838fd1498Szrj 
706938fd1498Szrj       src = SET_SRC (x);
707038fd1498Szrj     }
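  /* A worked sketch of the three-term form: with A known to be 0 or -1,

	 (if_then_else (ne A (const_int 0)) B C)

     becomes

	 (ior (ior term1 (and A B)) (and (not A) C))

     where term1 is const0_rtx unless one arm was an IOR of the other, in
     which case the shared value is factored out into term1.  */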
707138fd1498Szrj 
707238fd1498Szrj   /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
707338fd1498Szrj      whole thing fail.  */
707438fd1498Szrj   if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
707538fd1498Szrj     return src;
707638fd1498Szrj   else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
707738fd1498Szrj     return dest;
707838fd1498Szrj   else
707938fd1498Szrj     /* Convert this into a field assignment operation, if possible.  */
708038fd1498Szrj     return make_field_assignment (x);
708138fd1498Szrj }
708238fd1498Szrj 
708338fd1498Szrj /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
708438fd1498Szrj    result.  */
708538fd1498Szrj 
708638fd1498Szrj static rtx
708738fd1498Szrj simplify_logical (rtx x)
708838fd1498Szrj {
708938fd1498Szrj   rtx op0 = XEXP (x, 0);
709038fd1498Szrj   rtx op1 = XEXP (x, 1);
709138fd1498Szrj   scalar_int_mode mode;
709238fd1498Szrj 
709338fd1498Szrj   switch (GET_CODE (x))
709438fd1498Szrj     {
709538fd1498Szrj     case AND:
709638fd1498Szrj       /* We can call simplify_and_const_int only if we don't lose
709738fd1498Szrj 	 any (sign) bits when converting INTVAL (op1) to
709838fd1498Szrj 	 "unsigned HOST_WIDE_INT".  */
709938fd1498Szrj       if (is_a <scalar_int_mode> (GET_MODE (x), &mode)
710038fd1498Szrj 	  && CONST_INT_P (op1)
710138fd1498Szrj 	  && (HWI_COMPUTABLE_MODE_P (mode)
710238fd1498Szrj 	      || INTVAL (op1) > 0))
710338fd1498Szrj 	{
710438fd1498Szrj 	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
710538fd1498Szrj 	  if (GET_CODE (x) != AND)
710638fd1498Szrj 	    return x;
710738fd1498Szrj 
710838fd1498Szrj 	  op0 = XEXP (x, 0);
710938fd1498Szrj 	  op1 = XEXP (x, 1);
711038fd1498Szrj 	}
711138fd1498Szrj 
711238fd1498Szrj       /* If we have any of (and (ior A B) C) or (and (xor A B) C),
711338fd1498Szrj 	 apply the distributive law and then the inverse distributive
711438fd1498Szrj 	 law to see if things simplify.  */
711538fd1498Szrj       if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
711638fd1498Szrj 	{
711738fd1498Szrj 	  rtx result = distribute_and_simplify_rtx (x, 0);
711838fd1498Szrj 	  if (result)
711938fd1498Szrj 	    return result;
712038fd1498Szrj 	}
712138fd1498Szrj       if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
712238fd1498Szrj 	{
712338fd1498Szrj 	  rtx result = distribute_and_simplify_rtx (x, 1);
712438fd1498Szrj 	  if (result)
712538fd1498Szrj 	    return result;
712638fd1498Szrj 	}
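      /* For instance, (and (ior A B) C) is first distributed to
	 (ior (and A C) (and B C)); if the inverse distributive law can
	 then refold that into something cheaper, the result is kept.  */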
712738fd1498Szrj       break;
712838fd1498Szrj 
712938fd1498Szrj     case IOR:
713038fd1498Szrj       /* If we have (ior (and A B) C), apply the distributive law and then
713138fd1498Szrj 	 the inverse distributive law to see if things simplify.  */
713238fd1498Szrj 
713338fd1498Szrj       if (GET_CODE (op0) == AND)
713438fd1498Szrj 	{
713538fd1498Szrj 	  rtx result = distribute_and_simplify_rtx (x, 0);
713638fd1498Szrj 	  if (result)
713738fd1498Szrj 	    return result;
713838fd1498Szrj 	}
713938fd1498Szrj 
714038fd1498Szrj       if (GET_CODE (op1) == AND)
714138fd1498Szrj 	{
714238fd1498Szrj 	  rtx result = distribute_and_simplify_rtx (x, 1);
714338fd1498Szrj 	  if (result)
714438fd1498Szrj 	    return result;
714538fd1498Szrj 	}
714638fd1498Szrj       break;
714738fd1498Szrj 
714838fd1498Szrj     default:
714938fd1498Szrj       gcc_unreachable ();
715038fd1498Szrj     }
715138fd1498Szrj 
715238fd1498Szrj   return x;
715338fd1498Szrj }
715438fd1498Szrj 
715538fd1498Szrj /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
715638fd1498Szrj    operations" because they can be replaced with two more basic operations.
715738fd1498Szrj    ZERO_EXTEND is also considered "compound" because it can be replaced with
715838fd1498Szrj    an AND operation, which is simpler, though only one operation.
715938fd1498Szrj 
716038fd1498Szrj    The function expand_compound_operation is called with an rtx expression
716138fd1498Szrj    and will convert it to the appropriate shifts and AND operations,
716238fd1498Szrj    simplifying at each stage.
716338fd1498Szrj 
716438fd1498Szrj    The function make_compound_operation is called to convert an expression
716538fd1498Szrj    consisting of shifts and ANDs into the equivalent compound expression.
716638fd1498Szrj    It is the inverse of this function, loosely speaking.  */
716738fd1498Szrj 
716838fd1498Szrj static rtx
716938fd1498Szrj expand_compound_operation (rtx x)
717038fd1498Szrj {
717138fd1498Szrj   unsigned HOST_WIDE_INT pos = 0, len;
717238fd1498Szrj   int unsignedp = 0;
717338fd1498Szrj   unsigned int modewidth;
717438fd1498Szrj   rtx tem;
717538fd1498Szrj   scalar_int_mode inner_mode;
717638fd1498Szrj 
717738fd1498Szrj   switch (GET_CODE (x))
717838fd1498Szrj     {
717938fd1498Szrj     case ZERO_EXTEND:
718038fd1498Szrj       unsignedp = 1;
718138fd1498Szrj       /* FALLTHRU */
718238fd1498Szrj     case SIGN_EXTEND:
718338fd1498Szrj       /* We can't necessarily use a const_int for a multiword mode;
718438fd1498Szrj 	 it depends on implicitly extending the value.
718538fd1498Szrj 	 Since we don't know the right way to extend it,
718638fd1498Szrj 	 we can't tell whether the implicit way is right.
718738fd1498Szrj 
718838fd1498Szrj 	 Even for a mode that is no wider than a const_int,
718938fd1498Szrj 	 we can't win, because we need to sign extend one of its bits through
719038fd1498Szrj 	 the rest of it, and we don't know which bit.  */
719138fd1498Szrj       if (CONST_INT_P (XEXP (x, 0)))
719238fd1498Szrj 	return x;
719338fd1498Szrj 
719438fd1498Szrj       /* Reject modes that aren't scalar integers because turning vector
719538fd1498Szrj 	 or complex modes into shifts causes problems.  */
719638fd1498Szrj       if (!is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
719738fd1498Szrj 	return x;
719838fd1498Szrj 
719938fd1498Szrj       /* Return if (subreg:MODE FROM 0) is not a safe replacement for
720038fd1498Szrj 	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
720138fd1498Szrj 	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
720238fd1498Szrj 	 reloaded. If not for that, MEM's would very rarely be safe.
720338fd1498Szrj 	 reloaded.  If not for that, MEMs would very rarely be safe.
720438fd1498Szrj 	 Reject modes bigger than a word, because we might not be able
720538fd1498Szrj 	 to reference a two-register group starting with an arbitrary register
720638fd1498Szrj 	 (and currently gen_lowpart might crash for a SUBREG).  */
720738fd1498Szrj 
720838fd1498Szrj       if (GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
720938fd1498Szrj 	return x;
721038fd1498Szrj 
721138fd1498Szrj       len = GET_MODE_PRECISION (inner_mode);
721238fd1498Szrj       /* If the inner object has VOIDmode (the only way this can happen
721338fd1498Szrj 	 is if it is an ASM_OPERANDS), we can't do anything since we don't
721438fd1498Szrj 	 know how much masking to do.  */
721538fd1498Szrj       if (len == 0)
721638fd1498Szrj 	return x;
721738fd1498Szrj 
721838fd1498Szrj       break;
721938fd1498Szrj 
722038fd1498Szrj     case ZERO_EXTRACT:
722138fd1498Szrj       unsignedp = 1;
722238fd1498Szrj 
722338fd1498Szrj       /* fall through */
722438fd1498Szrj 
722538fd1498Szrj     case SIGN_EXTRACT:
722638fd1498Szrj       /* If the operand is a CLOBBER, just return it.  */
722738fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == CLOBBER)
722838fd1498Szrj 	return XEXP (x, 0);
722938fd1498Szrj 
723038fd1498Szrj       if (!CONST_INT_P (XEXP (x, 1))
723138fd1498Szrj 	  || !CONST_INT_P (XEXP (x, 2)))
723238fd1498Szrj 	return x;
723338fd1498Szrj 
723438fd1498Szrj       /* Reject modes that aren't scalar integers because turning vector
723538fd1498Szrj 	 or complex modes into shifts causes problems.  */
723638fd1498Szrj       if (!is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
723738fd1498Szrj 	return x;
723838fd1498Szrj 
723938fd1498Szrj       len = INTVAL (XEXP (x, 1));
724038fd1498Szrj       pos = INTVAL (XEXP (x, 2));
724138fd1498Szrj 
724238fd1498Szrj       /* This should stay within the object being extracted, fail otherwise.  */
724338fd1498Szrj       if (len + pos > GET_MODE_PRECISION (inner_mode))
724438fd1498Szrj 	return x;
724538fd1498Szrj 
724638fd1498Szrj       if (BITS_BIG_ENDIAN)
724738fd1498Szrj 	pos = GET_MODE_PRECISION (inner_mode) - len - pos;
724838fd1498Szrj 
724938fd1498Szrj       break;
725038fd1498Szrj 
725138fd1498Szrj     default:
725238fd1498Szrj       return x;
725338fd1498Szrj     }
725438fd1498Szrj 
725538fd1498Szrj   /* We've rejected non-scalar operations by now.  */
725638fd1498Szrj   scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (x));
725738fd1498Szrj 
725838fd1498Szrj   /* Convert sign extension to zero extension, if we know that the high
725938fd1498Szrj      bit is not set, as this is easier to optimize.  It will be converted
726038fd1498Szrj      back to a cheaper alternative in make_extraction.  */
726138fd1498Szrj   if (GET_CODE (x) == SIGN_EXTEND
726238fd1498Szrj       && HWI_COMPUTABLE_MODE_P (mode)
726338fd1498Szrj       && ((nonzero_bits (XEXP (x, 0), inner_mode)
726438fd1498Szrj 	   & ~(((unsigned HOST_WIDE_INT) GET_MODE_MASK (inner_mode)) >> 1))
726538fd1498Szrj 	  == 0))
726638fd1498Szrj     {
726738fd1498Szrj       rtx temp = gen_rtx_ZERO_EXTEND (mode, XEXP (x, 0));
726838fd1498Szrj       rtx temp2 = expand_compound_operation (temp);
726938fd1498Szrj 
727038fd1498Szrj       /* Make sure this is a profitable operation.  */
727138fd1498Szrj       if (set_src_cost (x, mode, optimize_this_for_speed_p)
727238fd1498Szrj           > set_src_cost (temp2, mode, optimize_this_for_speed_p))
727338fd1498Szrj        return temp2;
727438fd1498Szrj       else if (set_src_cost (x, mode, optimize_this_for_speed_p)
727538fd1498Szrj                > set_src_cost (temp, mode, optimize_this_for_speed_p))
727638fd1498Szrj        return temp;
727738fd1498Szrj       else
727838fd1498Szrj        return x;
727938fd1498Szrj     }
728038fd1498Szrj 
728138fd1498Szrj   /* We can optimize some special cases of ZERO_EXTEND.  */
728238fd1498Szrj   if (GET_CODE (x) == ZERO_EXTEND)
728338fd1498Szrj     {
728438fd1498Szrj       /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
728538fd1498Szrj 	 know that the last value didn't have any inappropriate bits
728638fd1498Szrj 	 set.  */
728738fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == TRUNCATE
728838fd1498Szrj 	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode
728938fd1498Szrj 	  && HWI_COMPUTABLE_MODE_P (mode)
729038fd1498Szrj 	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode)
729138fd1498Szrj 	      & ~GET_MODE_MASK (inner_mode)) == 0)
729238fd1498Szrj 	return XEXP (XEXP (x, 0), 0);
729338fd1498Szrj 
729438fd1498Szrj       /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
729538fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == SUBREG
729638fd1498Szrj 	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == mode
729738fd1498Szrj 	  && subreg_lowpart_p (XEXP (x, 0))
729838fd1498Szrj 	  && HWI_COMPUTABLE_MODE_P (mode)
729938fd1498Szrj 	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), mode)
730038fd1498Szrj 	      & ~GET_MODE_MASK (inner_mode)) == 0)
730138fd1498Szrj 	return SUBREG_REG (XEXP (x, 0));
730238fd1498Szrj 
730338fd1498Szrj       /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
730438fd1498Szrj 	 is a comparison and STORE_FLAG_VALUE permits.  This is like
730538fd1498Szrj 	 the first case, but it works even when MODE is larger
730638fd1498Szrj 	 than HOST_WIDE_INT.  */
730738fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == TRUNCATE
730838fd1498Szrj 	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode
730938fd1498Szrj 	  && COMPARISON_P (XEXP (XEXP (x, 0), 0))
731038fd1498Szrj 	  && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
731138fd1498Szrj 	  && (STORE_FLAG_VALUE & ~GET_MODE_MASK (inner_mode)) == 0)
731238fd1498Szrj 	return XEXP (XEXP (x, 0), 0);
731338fd1498Szrj 
731438fd1498Szrj       /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
731538fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == SUBREG
731638fd1498Szrj 	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == mode
731738fd1498Szrj 	  && subreg_lowpart_p (XEXP (x, 0))
731838fd1498Szrj 	  && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
731938fd1498Szrj 	  && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
732038fd1498Szrj 	  && (STORE_FLAG_VALUE & ~GET_MODE_MASK (inner_mode)) == 0)
732138fd1498Szrj 	return SUBREG_REG (XEXP (x, 0));
732238fd1498Szrj 
732338fd1498Szrj     }
732438fd1498Szrj 
732538fd1498Szrj   /* If we reach here, we want to return a pair of shifts.  The inner
732638fd1498Szrj      shift is a left shift of BITSIZE - POS - LEN bits.  The outer
732738fd1498Szrj      shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
732838fd1498Szrj      logical depending on the value of UNSIGNEDP.
732938fd1498Szrj 
733038fd1498Szrj      If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
733138fd1498Szrj      converted into an AND of a shift.
733238fd1498Szrj 
733338fd1498Szrj      We must check for the case where the left shift would have a negative
733438fd1498Szrj      count.  This can happen in a case like (x >> 31) & 255 on machines
733538fd1498Szrj      that can't shift by a constant.  On those machines, we would first
733638fd1498Szrj      combine the shift with the AND to produce a variable-position
733738fd1498Szrj      extraction.  Then the constant of 31 would be substituted in
733838fd1498Szrj      to produce such a position.  */
733938fd1498Szrj 
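  /* Concretely, a sketch for a 32-bit MODE:

	 (sign_extract:SI X (const_int 8) (const_int 8))

     has MODEWIDTH == 32, LEN == 8 and POS == 8, so it expands to

	 (ashiftrt:SI (ashift:SI X (const_int 16)) (const_int 24))

     while the unsigned variants use lshiftrt for the outer shift.  */
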
734038fd1498Szrj   modewidth = GET_MODE_PRECISION (mode);
734138fd1498Szrj   if (modewidth >= pos + len)
734238fd1498Szrj     {
734338fd1498Szrj       tem = gen_lowpart (mode, XEXP (x, 0));
734438fd1498Szrj       if (!tem || GET_CODE (tem) == CLOBBER)
734538fd1498Szrj 	return x;
734638fd1498Szrj       tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
734738fd1498Szrj 				  tem, modewidth - pos - len);
734838fd1498Szrj       tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
734938fd1498Szrj 				  mode, tem, modewidth - len);
735038fd1498Szrj     }
735138fd1498Szrj   else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
735238fd1498Szrj     tem = simplify_and_const_int (NULL_RTX, mode,
735338fd1498Szrj 				  simplify_shift_const (NULL_RTX, LSHIFTRT,
735438fd1498Szrj 							mode, XEXP (x, 0),
735538fd1498Szrj 							pos),
735638fd1498Szrj 				  (HOST_WIDE_INT_1U << len) - 1);
735738fd1498Szrj   else
735838fd1498Szrj     /* Any other cases we can't handle.  */
735938fd1498Szrj     return x;
736038fd1498Szrj 
736138fd1498Szrj   /* If we couldn't do this for some reason, return the original
736238fd1498Szrj      expression.  */
736338fd1498Szrj   if (GET_CODE (tem) == CLOBBER)
736438fd1498Szrj     return x;
736538fd1498Szrj 
736638fd1498Szrj   return tem;
736738fd1498Szrj }
736838fd1498Szrj 
736938fd1498Szrj /* X is a SET which contains an assignment of one object into
737038fd1498Szrj    a part of another (such as a bit-field assignment, STRICT_LOW_PART,
737138fd1498Szrj    or certain SUBREGS). If possible, convert it into a series of
737238fd1498Szrj    logical operations.
737338fd1498Szrj 
737438fd1498Szrj    We half-heartedly support variable positions, but do not at all
737538fd1498Szrj    support variable lengths.  */
737638fd1498Szrj 
737738fd1498Szrj static const_rtx
737838fd1498Szrj expand_field_assignment (const_rtx x)
737938fd1498Szrj {
738038fd1498Szrj   rtx inner;
738138fd1498Szrj   rtx pos;			/* Always counts from low bit.  */
738238fd1498Szrj   int len, inner_len;
738338fd1498Szrj   rtx mask, cleared, masked;
738438fd1498Szrj   scalar_int_mode compute_mode;
738538fd1498Szrj 
738638fd1498Szrj   /* Loop until we find something we can't simplify.  */
738738fd1498Szrj   while (1)
738838fd1498Szrj     {
738938fd1498Szrj       if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
739038fd1498Szrj 	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
739138fd1498Szrj 	{
739238fd1498Szrj 	  rtx x0 = XEXP (SET_DEST (x), 0);
739338fd1498Szrj 	  if (!GET_MODE_PRECISION (GET_MODE (x0)).is_constant (&len))
739438fd1498Szrj 	    break;
739538fd1498Szrj 	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
739638fd1498Szrj 	  pos = gen_int_mode (subreg_lsb (XEXP (SET_DEST (x), 0)),
739738fd1498Szrj 			      MAX_MODE_INT);
739838fd1498Szrj 	}
739938fd1498Szrj       else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
740038fd1498Szrj 	       && CONST_INT_P (XEXP (SET_DEST (x), 1)))
740138fd1498Szrj 	{
740238fd1498Szrj 	  inner = XEXP (SET_DEST (x), 0);
740338fd1498Szrj 	  if (!GET_MODE_PRECISION (GET_MODE (inner)).is_constant (&inner_len))
740438fd1498Szrj 	    break;
740538fd1498Szrj 
740638fd1498Szrj 	  len = INTVAL (XEXP (SET_DEST (x), 1));
740738fd1498Szrj 	  pos = XEXP (SET_DEST (x), 2);
740838fd1498Szrj 
740938fd1498Szrj 	  /* A constant position should stay within the width of INNER.  */
741038fd1498Szrj 	  if (CONST_INT_P (pos) && INTVAL (pos) + len > inner_len)
741138fd1498Szrj 	    break;
741238fd1498Szrj 
741338fd1498Szrj 	  if (BITS_BIG_ENDIAN)
741438fd1498Szrj 	    {
741538fd1498Szrj 	      if (CONST_INT_P (pos))
741638fd1498Szrj 		pos = GEN_INT (inner_len - len - INTVAL (pos));
741738fd1498Szrj 	      else if (GET_CODE (pos) == MINUS
741838fd1498Szrj 		       && CONST_INT_P (XEXP (pos, 1))
741938fd1498Szrj 		       && INTVAL (XEXP (pos, 1)) == inner_len - len)
742038fd1498Szrj 		/* If position is ADJUST - X, new position is X.  */
742138fd1498Szrj 		pos = XEXP (pos, 0);
742238fd1498Szrj 	      else
742338fd1498Szrj 		pos = simplify_gen_binary (MINUS, GET_MODE (pos),
742438fd1498Szrj 					   gen_int_mode (inner_len - len,
742538fd1498Szrj 							 GET_MODE (pos)),
742638fd1498Szrj 					   pos);
742738fd1498Szrj 	    }
742838fd1498Szrj 	}
742938fd1498Szrj 
743038fd1498Szrj       /* If the destination is a subreg that overwrites the whole of the inner
743138fd1498Szrj 	 register, we can move the subreg to the source.  */
743238fd1498Szrj       else if (GET_CODE (SET_DEST (x)) == SUBREG
743338fd1498Szrj 	       /* We need SUBREGs to compute nonzero_bits properly.  */
743438fd1498Szrj 	       && nonzero_sign_valid
743538fd1498Szrj 	       && !read_modify_subreg_p (SET_DEST (x)))
743638fd1498Szrj 	{
743738fd1498Szrj 	  x = gen_rtx_SET (SUBREG_REG (SET_DEST (x)),
743838fd1498Szrj 			   gen_lowpart
743938fd1498Szrj 			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
744038fd1498Szrj 			    SET_SRC (x)));
744138fd1498Szrj 	  continue;
744238fd1498Szrj 	}
744338fd1498Szrj       else
744438fd1498Szrj 	break;
744538fd1498Szrj 
744638fd1498Szrj       while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
744738fd1498Szrj 	inner = SUBREG_REG (inner);
744838fd1498Szrj 
744938fd1498Szrj       /* Don't attempt bitwise arithmetic on non scalar integer modes.  */
745038fd1498Szrj       /* Don't attempt bitwise arithmetic on non-scalar integer modes.  */
745138fd1498Szrj 	{
745238fd1498Szrj 	  /* Don't do anything for vector or complex integral types.  */
745338fd1498Szrj 	  if (! FLOAT_MODE_P (GET_MODE (inner)))
745438fd1498Szrj 	    break;
745538fd1498Szrj 
745638fd1498Szrj 	  /* Try to find an integral mode to pun with.  */
745738fd1498Szrj 	  if (!int_mode_for_size (GET_MODE_BITSIZE (GET_MODE (inner)), 0)
745838fd1498Szrj 	      .exists (&compute_mode))
745938fd1498Szrj 	    break;
746038fd1498Szrj 
746138fd1498Szrj 	  inner = gen_lowpart (compute_mode, inner);
746238fd1498Szrj 	}
746338fd1498Szrj 
746438fd1498Szrj       /* Compute a mask of LEN bits, if we can do this on the host machine.  */
746538fd1498Szrj       if (len >= HOST_BITS_PER_WIDE_INT)
746638fd1498Szrj 	break;
746738fd1498Szrj 
746838fd1498Szrj       /* Don't try to compute in too wide unsupported modes.  */
746938fd1498Szrj       if (!targetm.scalar_mode_supported_p (compute_mode))
747038fd1498Szrj 	break;
747138fd1498Szrj 
747238fd1498Szrj       /* Now compute the equivalent expression.  Make a copy of INNER
747338fd1498Szrj 	 for the SET_DEST in case it is a MEM into which we will substitute;
747438fd1498Szrj 	 we don't want shared RTL in that case.  */
747538fd1498Szrj       mask = gen_int_mode ((HOST_WIDE_INT_1U << len) - 1,
747638fd1498Szrj 			   compute_mode);
747738fd1498Szrj       cleared = simplify_gen_binary (AND, compute_mode,
747838fd1498Szrj 				     simplify_gen_unary (NOT, compute_mode,
747938fd1498Szrj 				       simplify_gen_binary (ASHIFT,
748038fd1498Szrj 							    compute_mode,
748138fd1498Szrj 							    mask, pos),
748238fd1498Szrj 				       compute_mode),
748338fd1498Szrj 				     inner);
748438fd1498Szrj       masked = simplify_gen_binary (ASHIFT, compute_mode,
748538fd1498Szrj 				    simplify_gen_binary (
748638fd1498Szrj 				      AND, compute_mode,
748738fd1498Szrj 				      gen_lowpart (compute_mode, SET_SRC (x)),
748838fd1498Szrj 				      mask),
748938fd1498Szrj 				    pos);
749038fd1498Szrj 
749138fd1498Szrj       x = gen_rtx_SET (copy_rtx (inner),
749238fd1498Szrj 		       simplify_gen_binary (IOR, compute_mode,
749338fd1498Szrj 					    cleared, masked));
749438fd1498Szrj     }
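  /* For example, a sketch with constant POS == 8, LEN == 8 and
     !BITS_BIG_ENDIAN:

	 (set (zero_extract:SI R (const_int 8) (const_int 8)) Y)

     is rewritten to

	 (set R (ior:SI (and:SI R (not:SI (ashift:SI (const_int 255)
						     (const_int 8))))
			(ashift:SI (and:SI Y (const_int 255))
				   (const_int 8))))

     i.e. clear the field in R, mask Y to LEN bits, shift the masked
     value into place, and IOR it back in.  */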
749538fd1498Szrj 
749638fd1498Szrj   return x;
749738fd1498Szrj }
749838fd1498Szrj 
749938fd1498Szrj /* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
750038fd1498Szrj    it is an RTX that represents the (variable) starting position; otherwise,
750138fd1498Szrj    POS is the (constant) starting bit position.  Both are counted from the LSB.
750238fd1498Szrj 
750338fd1498Szrj    UNSIGNEDP is nonzero for an unsigned reference and zero for a signed one.
750438fd1498Szrj 
750538fd1498Szrj    IN_DEST is nonzero if this is a reference in the destination of a SET.
750638fd1498Szrj    This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
750738fd1498Szrj    a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
750838fd1498Szrj    be used.
750938fd1498Szrj 
751038fd1498Szrj    IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
751138fd1498Szrj    ZERO_EXTRACT should be built even for bits starting at bit 0.
751238fd1498Szrj 
751338fd1498Szrj    MODE is the desired mode of the result (if IN_DEST == 0).
751438fd1498Szrj 
751538fd1498Szrj    The result is an RTX for the extraction or NULL_RTX if the target
751638fd1498Szrj    can't handle it.  */
751738fd1498Szrj 
751838fd1498Szrj static rtx
751938fd1498Szrj make_extraction (machine_mode mode, rtx inner, HOST_WIDE_INT pos,
752038fd1498Szrj 		 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
752138fd1498Szrj 		 int in_dest, int in_compare)
752238fd1498Szrj {
752338fd1498Szrj   /* This mode describes the size of the storage area
752438fd1498Szrj      to fetch the overall value from.  Within that, we
752538fd1498Szrj      ignore the POS lowest bits, etc.  */
752638fd1498Szrj   machine_mode is_mode = GET_MODE (inner);
752738fd1498Szrj   machine_mode inner_mode;
752838fd1498Szrj   scalar_int_mode wanted_inner_mode;
752938fd1498Szrj   scalar_int_mode wanted_inner_reg_mode = word_mode;
753038fd1498Szrj   scalar_int_mode pos_mode = word_mode;
753138fd1498Szrj   machine_mode extraction_mode = word_mode;
753238fd1498Szrj   rtx new_rtx = 0;
753338fd1498Szrj   rtx orig_pos_rtx = pos_rtx;
753438fd1498Szrj   HOST_WIDE_INT orig_pos;
753538fd1498Szrj 
753638fd1498Szrj   if (pos_rtx && CONST_INT_P (pos_rtx))
753738fd1498Szrj     pos = INTVAL (pos_rtx), pos_rtx = 0;
753838fd1498Szrj 
753938fd1498Szrj   if (GET_CODE (inner) == SUBREG
754038fd1498Szrj       && subreg_lowpart_p (inner)
754138fd1498Szrj       && (paradoxical_subreg_p (inner)
754238fd1498Szrj 	  /* If trying or potentially trying to extract
754338fd1498Szrj 	     bits outside of is_mode, don't look through
754438fd1498Szrj 	     non-paradoxical SUBREGs.  See PR82192.  */
754538fd1498Szrj 	  || (pos_rtx == NULL_RTX
754638fd1498Szrj 	      && known_le (pos + len, GET_MODE_PRECISION (is_mode)))))
754738fd1498Szrj     {
754838fd1498Szrj       /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
754938fd1498Szrj 	 consider just the QI as the memory to extract from.
755038fd1498Szrj 	 The subreg adds or removes high bits; its mode is
755138fd1498Szrj 	 irrelevant to the meaning of this extraction,
755238fd1498Szrj 	 since POS and LEN count from the lsb.  */
755338fd1498Szrj       if (MEM_P (SUBREG_REG (inner)))
755438fd1498Szrj 	is_mode = GET_MODE (SUBREG_REG (inner));
755538fd1498Szrj       inner = SUBREG_REG (inner);
755638fd1498Szrj     }
755738fd1498Szrj   else if (GET_CODE (inner) == ASHIFT
755838fd1498Szrj 	   && CONST_INT_P (XEXP (inner, 1))
755938fd1498Szrj 	   && pos_rtx == 0 && pos == 0
756038fd1498Szrj 	   && len > UINTVAL (XEXP (inner, 1)))
756138fd1498Szrj     {
756238fd1498Szrj       /* We're extracting the least significant bits of an rtx
756338fd1498Szrj 	 (ashift X (const_int C)), where LEN > C.  Extract the
756438fd1498Szrj 	 least significant (LEN - C) bits of X, giving an rtx
756538fd1498Szrj 	 whose mode is MODE, then shift it left C times.  */
756638fd1498Szrj       new_rtx = make_extraction (mode, XEXP (inner, 0),
756738fd1498Szrj 			     0, 0, len - INTVAL (XEXP (inner, 1)),
756838fd1498Szrj 			     unsignedp, in_dest, in_compare);
756938fd1498Szrj       if (new_rtx != 0)
757038fd1498Szrj 	return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
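      /* E.g., a sketch: extracting the low 8 bits of
	 (ashift X (const_int 3)) extracts the low 5 bits of X and then
	 shifts the result left by 3.  */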
757138fd1498Szrj     }
757238fd1498Szrj   else if (GET_CODE (inner) == TRUNCATE
757338fd1498Szrj 	   /* If trying or potentially trying to extract
757438fd1498Szrj 	      bits outside of is_mode, don't look through
757538fd1498Szrj 	      TRUNCATE.  See PR82192.  */
757638fd1498Szrj 	   && pos_rtx == NULL_RTX
757738fd1498Szrj 	   && known_le (pos + len, GET_MODE_PRECISION (is_mode)))
757838fd1498Szrj     inner = XEXP (inner, 0);
757938fd1498Szrj 
758038fd1498Szrj   inner_mode = GET_MODE (inner);
758138fd1498Szrj 
758238fd1498Szrj   /* See if this can be done without an extraction.  We never can if the
758338fd1498Szrj      width of the field is not the same as that of some integer mode. For
758438fd1498Szrj      registers, we can only avoid the extraction if the position is at the
758538fd1498Szrj      low-order bit and this is either not in the destination or we have the
758638fd1498Szrj      appropriate STRICT_LOW_PART operation available.
758738fd1498Szrj 
758838fd1498Szrj      For MEM, we can avoid an extract if the field starts on an appropriate
758938fd1498Szrj      boundary and we can change the mode of the memory reference.  */
759038fd1498Szrj 
759138fd1498Szrj   scalar_int_mode tmode;
759238fd1498Szrj   if (int_mode_for_size (len, 1).exists (&tmode)
759338fd1498Szrj       && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
759438fd1498Szrj 	   && !MEM_P (inner)
759538fd1498Szrj 	   && (pos == 0 || REG_P (inner))
759638fd1498Szrj 	   && (inner_mode == tmode
759738fd1498Szrj 	       || !REG_P (inner)
759838fd1498Szrj 	       || TRULY_NOOP_TRUNCATION_MODES_P (tmode, inner_mode)
759938fd1498Szrj 	       || reg_truncated_to_mode (tmode, inner))
760038fd1498Szrj 	   && (! in_dest
760138fd1498Szrj 	       || (REG_P (inner)
760238fd1498Szrj 		   && have_insn_for (STRICT_LOW_PART, tmode))))
760338fd1498Szrj 	  || (MEM_P (inner) && pos_rtx == 0
760438fd1498Szrj 	      && (pos
760538fd1498Szrj 		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
760638fd1498Szrj 		     : BITS_PER_UNIT)) == 0
760738fd1498Szrj 	      /* We can't do this if we are widening INNER_MODE (it
760838fd1498Szrj 		 may not be aligned, for one thing).  */
760938fd1498Szrj 	      && !paradoxical_subreg_p (tmode, inner_mode)
7610*58e805e6Szrj 	      && known_le (pos + len, GET_MODE_PRECISION (is_mode))
761138fd1498Szrj 	      && (inner_mode == tmode
761238fd1498Szrj 		  || (! mode_dependent_address_p (XEXP (inner, 0),
761338fd1498Szrj 						  MEM_ADDR_SPACE (inner))
761438fd1498Szrj 		      && ! MEM_VOLATILE_P (inner))))))
761538fd1498Szrj     {
761638fd1498Szrj       /* If INNER is a MEM, make a new MEM that encompasses just the desired
761738fd1498Szrj 	 field.  If the original and current mode are the same, we need not
761838fd1498Szrj 	 adjust the offset.  Otherwise, we do if bytes big endian.
761938fd1498Szrj 
762038fd1498Szrj 	 If INNER is not a MEM, get a piece consisting of just the field
762138fd1498Szrj 	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
762238fd1498Szrj 
762338fd1498Szrj       if (MEM_P (inner))
762438fd1498Szrj 	{
762538fd1498Szrj 	  poly_int64 offset;
762638fd1498Szrj 
762738fd1498Szrj 	  /* POS counts from lsb, but make OFFSET count in memory order.  */
762838fd1498Szrj 	  if (BYTES_BIG_ENDIAN)
762938fd1498Szrj 	    offset = bits_to_bytes_round_down (GET_MODE_PRECISION (is_mode)
763038fd1498Szrj 					       - len - pos);
763138fd1498Szrj 	  else
763238fd1498Szrj 	    offset = pos / BITS_PER_UNIT;
763338fd1498Szrj 
763438fd1498Szrj 	  new_rtx = adjust_address_nv (inner, tmode, offset);
763538fd1498Szrj 	}
763638fd1498Szrj       else if (REG_P (inner))
763738fd1498Szrj 	{
763838fd1498Szrj 	  if (tmode != inner_mode)
763938fd1498Szrj 	    {
764038fd1498Szrj 	      /* We can't call gen_lowpart in a DEST since we
764138fd1498Szrj 		 always want a SUBREG (see below) and it would sometimes
764238fd1498Szrj 		 return a new hard register.  */
764338fd1498Szrj 	      if (pos || in_dest)
764438fd1498Szrj 		{
764538fd1498Szrj 		  poly_uint64 offset
764638fd1498Szrj 		    = subreg_offset_from_lsb (tmode, inner_mode, pos);
764738fd1498Szrj 
764838fd1498Szrj 		  /* Avoid creating invalid subregs, for example when
764938fd1498Szrj 		     simplifying (x>>32)&255.  */
765038fd1498Szrj 		  if (!validate_subreg (tmode, inner_mode, inner, offset))
765138fd1498Szrj 		    return NULL_RTX;
765238fd1498Szrj 
765338fd1498Szrj 		  new_rtx = gen_rtx_SUBREG (tmode, inner, offset);
765438fd1498Szrj 		}
765538fd1498Szrj 	      else
765638fd1498Szrj 		new_rtx = gen_lowpart (tmode, inner);
765738fd1498Szrj 	    }
765838fd1498Szrj 	  else
765938fd1498Szrj 	    new_rtx = inner;
766038fd1498Szrj 	}
766138fd1498Szrj       else
766238fd1498Szrj 	new_rtx = force_to_mode (inner, tmode,
766338fd1498Szrj 				 len >= HOST_BITS_PER_WIDE_INT
766438fd1498Szrj 				 ? HOST_WIDE_INT_M1U
766538fd1498Szrj 				 : (HOST_WIDE_INT_1U << len) - 1, 0);
766638fd1498Szrj 
766738fd1498Szrj       /* If this extraction is going into the destination of a SET,
766838fd1498Szrj 	 make a STRICT_LOW_PART unless we made a MEM.  */
766938fd1498Szrj 
767038fd1498Szrj       if (in_dest)
767138fd1498Szrj 	return (MEM_P (new_rtx) ? new_rtx
767238fd1498Szrj 		: (GET_CODE (new_rtx) != SUBREG
767338fd1498Szrj 		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
767438fd1498Szrj 		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));
767538fd1498Szrj 
767638fd1498Szrj       if (mode == tmode)
767738fd1498Szrj 	return new_rtx;
767838fd1498Szrj 
767938fd1498Szrj       if (CONST_SCALAR_INT_P (new_rtx))
768038fd1498Szrj 	return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
768138fd1498Szrj 					 mode, new_rtx, tmode);
768238fd1498Szrj 
768338fd1498Szrj       /* If we know that no extraneous bits are set, and that the high
768438fd1498Szrj 	 bit is not set, convert the extraction to the cheaper of
768538fd1498Szrj 	 sign and zero extension, which are equivalent in these cases.  */
768638fd1498Szrj       if (flag_expensive_optimizations
768738fd1498Szrj 	  && (HWI_COMPUTABLE_MODE_P (tmode)
768838fd1498Szrj 	      && ((nonzero_bits (new_rtx, tmode)
768938fd1498Szrj 		   & ~(((unsigned HOST_WIDE_INT)GET_MODE_MASK (tmode)) >> 1))
769038fd1498Szrj 		  == 0)))
769138fd1498Szrj 	{
769238fd1498Szrj 	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
769338fd1498Szrj 	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);
769438fd1498Szrj 
769538fd1498Szrj 	  /* Prefer ZERO_EXTENSION, since it gives more information to
769638fd1498Szrj 	     backends.  */
769738fd1498Szrj 	  if (set_src_cost (temp, mode, optimize_this_for_speed_p)
769838fd1498Szrj 	      <= set_src_cost (temp1, mode, optimize_this_for_speed_p))
769938fd1498Szrj 	    return temp;
770038fd1498Szrj 	  return temp1;
770138fd1498Szrj 	}
770238fd1498Szrj 
770338fd1498Szrj       /* Otherwise, sign- or zero-extend unless we already are in the
770438fd1498Szrj 	 proper mode.  */
770538fd1498Szrj 
770638fd1498Szrj       return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
770738fd1498Szrj 			     mode, new_rtx));
770838fd1498Szrj     }
770938fd1498Szrj 
771038fd1498Szrj   /* Unless this is a COMPARE or we have a funny memory reference,
771138fd1498Szrj      don't do anything with zero-extending field extracts starting at
771238fd1498Szrj      the low-order bit since they are simple AND operations.  */
771338fd1498Szrj   if (pos_rtx == 0 && pos == 0 && ! in_dest
771438fd1498Szrj       && ! in_compare && unsignedp)
771538fd1498Szrj     return 0;
771638fd1498Szrj 
771738fd1498Szrj   /* If INNER is a MEM, reject this if we would be spanning bytes or
771838fd1498Szrj      if the position is not a constant and the length is not 1.  In all
771938fd1498Szrj      other cases, we would only be going outside our object in cases when
772038fd1498Szrj      an original shift would have been undefined.  */
772138fd1498Szrj   if (MEM_P (inner)
772238fd1498Szrj       && ((pos_rtx == 0 && maybe_gt (pos + len, GET_MODE_PRECISION (is_mode)))
772338fd1498Szrj 	  || (pos_rtx != 0 && len != 1)))
772438fd1498Szrj     return 0;
772538fd1498Szrj 
772638fd1498Szrj   enum extraction_pattern pattern = (in_dest ? EP_insv
772738fd1498Szrj 				     : unsignedp ? EP_extzv : EP_extv);
772838fd1498Szrj 
772938fd1498Szrj   /* If INNER is not from memory, we want it to have the mode of a register
773038fd1498Szrj      extraction pattern's structure operand, or word_mode if there is no
773138fd1498Szrj      such pattern.  The same applies to extraction_mode and pos_mode
773238fd1498Szrj      and their respective operands.
773338fd1498Szrj 
773438fd1498Szrj      For memory, assume that the desired extraction_mode and pos_mode
773538fd1498Szrj      are the same as for a register operation, since at present we don't
773638fd1498Szrj      have named patterns for aligned memory structures.  */
773738fd1498Szrj   struct extraction_insn insn;
773838fd1498Szrj   unsigned int inner_size;
773938fd1498Szrj   if (GET_MODE_BITSIZE (inner_mode).is_constant (&inner_size)
774038fd1498Szrj       && get_best_reg_extraction_insn (&insn, pattern, inner_size, mode))
774138fd1498Szrj     {
774238fd1498Szrj       wanted_inner_reg_mode = insn.struct_mode.require ();
774338fd1498Szrj       pos_mode = insn.pos_mode;
774438fd1498Szrj       extraction_mode = insn.field_mode;
774538fd1498Szrj     }
774638fd1498Szrj 
774738fd1498Szrj   /* Never narrow an object, since that might not be safe.  */
774838fd1498Szrj 
774938fd1498Szrj   if (mode != VOIDmode
775038fd1498Szrj       && partial_subreg_p (extraction_mode, mode))
775138fd1498Szrj     extraction_mode = mode;
775238fd1498Szrj 
7753*58e805e6Szrj   /* Punt if len is too large for extraction_mode.  */
7754*58e805e6Szrj   if (maybe_gt (len, GET_MODE_PRECISION (extraction_mode)))
7755*58e805e6Szrj     return NULL_RTX;
7756*58e805e6Szrj 
775738fd1498Szrj   if (!MEM_P (inner))
775838fd1498Szrj     wanted_inner_mode = wanted_inner_reg_mode;
775938fd1498Szrj   else
776038fd1498Szrj     {
776138fd1498Szrj       /* Be careful not to go beyond the extracted object and maintain the
776238fd1498Szrj 	 natural alignment of the memory.  */
776338fd1498Szrj       wanted_inner_mode = smallest_int_mode_for_size (len);
776438fd1498Szrj       while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
776538fd1498Szrj 	     > GET_MODE_BITSIZE (wanted_inner_mode))
776638fd1498Szrj 	wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode).require ();
776738fd1498Szrj     }
776838fd1498Szrj 
776938fd1498Szrj   orig_pos = pos;
777038fd1498Szrj 
777138fd1498Szrj   if (BITS_BIG_ENDIAN)
777238fd1498Szrj     {
777338fd1498Szrj       /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
777438fd1498Szrj 	 BITS_BIG_ENDIAN style.  If position is constant, compute new
777538fd1498Szrj 	 position.  Otherwise, build subtraction.
777638fd1498Szrj 	 Note that POS is relative to the mode of the original argument.
777738fd1498Szrj 	 If it's a MEM we need to recompute POS relative to that.
777838fd1498Szrj 	 However, if we're extracting from (or inserting into) a register,
777938fd1498Szrj 	 we want to recompute POS relative to wanted_inner_mode.  */
778038fd1498Szrj       int width;
778138fd1498Szrj       if (!MEM_P (inner))
778238fd1498Szrj 	width = GET_MODE_BITSIZE (wanted_inner_mode);
778338fd1498Szrj       else if (!GET_MODE_BITSIZE (is_mode).is_constant (&width))
778438fd1498Szrj 	return NULL_RTX;
778538fd1498Szrj 
778638fd1498Szrj       if (pos_rtx == 0)
778738fd1498Szrj 	pos = width - len - pos;
778838fd1498Szrj       else
778938fd1498Szrj 	pos_rtx
779038fd1498Szrj 	  = gen_rtx_MINUS (GET_MODE (pos_rtx),
779138fd1498Szrj 			   gen_int_mode (width - len, GET_MODE (pos_rtx)),
779238fd1498Szrj 			   pos_rtx);
779338fd1498Szrj       /* POS may be less than 0 now, but we check for that below.
779438fd1498Szrj 	 Note that it can only be less than 0 if !MEM_P (inner).  */
779538fd1498Szrj     }
779638fd1498Szrj 
779738fd1498Szrj   /* If INNER has a wider mode, and this is a constant extraction, try to
779838fd1498Szrj      make it smaller and adjust the byte to point to the byte containing
779938fd1498Szrj      the value.  */
780038fd1498Szrj   if (wanted_inner_mode != VOIDmode
780138fd1498Szrj       && inner_mode != wanted_inner_mode
780238fd1498Szrj       && ! pos_rtx
780338fd1498Szrj       && partial_subreg_p (wanted_inner_mode, is_mode)
780438fd1498Szrj       && MEM_P (inner)
780538fd1498Szrj       && ! mode_dependent_address_p (XEXP (inner, 0), MEM_ADDR_SPACE (inner))
780638fd1498Szrj       && ! MEM_VOLATILE_P (inner))
780738fd1498Szrj     {
780838fd1498Szrj       poly_int64 offset = 0;
780938fd1498Szrj 
781038fd1498Szrj       /* The computations below will be correct if the machine is big
781138fd1498Szrj 	 endian in both bits and bytes or little endian in bits and bytes.
781238fd1498Szrj 	 If it is mixed, we must adjust.  */
781338fd1498Szrj 
781438fd1498Szrj       /* If bytes are big endian and we had a paradoxical SUBREG, we must
781538fd1498Szrj 	 adjust OFFSET to compensate.  */
781638fd1498Szrj       if (BYTES_BIG_ENDIAN
781738fd1498Szrj 	  && paradoxical_subreg_p (is_mode, inner_mode))
781838fd1498Szrj 	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
781938fd1498Szrj 
782038fd1498Szrj       /* We can now move to the desired byte.  */
782138fd1498Szrj       offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
782238fd1498Szrj 		* GET_MODE_SIZE (wanted_inner_mode);
782338fd1498Szrj       pos %= GET_MODE_BITSIZE (wanted_inner_mode);
782438fd1498Szrj 
782538fd1498Szrj       if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
782638fd1498Szrj 	  && is_mode != wanted_inner_mode)
782738fd1498Szrj 	offset = (GET_MODE_SIZE (is_mode)
782838fd1498Szrj 		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
782938fd1498Szrj 
783038fd1498Szrj       inner = adjust_address_nv (inner, wanted_inner_mode, offset);
783138fd1498Szrj     }
783238fd1498Szrj 
783338fd1498Szrj   /* If INNER is not memory, get it into the proper mode.  If we are changing
783438fd1498Szrj      its mode, POS must be a constant and smaller than the size of the new
783538fd1498Szrj      mode.  */
783638fd1498Szrj   else if (!MEM_P (inner))
783738fd1498Szrj     {
783838fd1498Szrj       /* On the LHS, don't create paradoxical subregs implicitly truncating
783938fd1498Szrj 	 the register unless TARGET_TRULY_NOOP_TRUNCATION.  */
      if (in_dest
	  && !TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (inner),
					     wanted_inner_mode))
	return NULL_RTX;

      if (GET_MODE (inner) != wanted_inner_mode
	  && (pos_rtx != 0
	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
	return NULL_RTX;

      if (orig_pos < 0)
	return NULL_RTX;

      inner = force_to_mode (inner, wanted_inner_mode,
			     pos_rtx
			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			     ? HOST_WIDE_INT_M1U
			     : (((HOST_WIDE_INT_1U << len) - 1)
				<< orig_pos),
			     0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.

     We dealt with constant rtxes earlier, so pos_rtx cannot
     have VOIDmode at this point.  */
  if (pos_rtx != 0
      && (GET_MODE_SIZE (pos_mode)
	  > GET_MODE_SIZE (as_a <scalar_int_mode> (GET_MODE (pos_rtx)))))
    {
      rtx temp = simplify_gen_unary (ZERO_EXTEND, pos_mode, pos_rtx,
				     GET_MODE (pos_rtx));

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 SIGN_EXTENSION and ZERO_EXTENSION, which are equivalent in
	 these cases.  */
      if (flag_expensive_optimizations
	  && (HWI_COMPUTABLE_MODE_P (GET_MODE (pos_rtx))
	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (GET_MODE (pos_rtx)))
		       >> 1))
		  == 0)))
	{
	  rtx temp1 = simplify_gen_unary (SIGN_EXTEND, pos_mode, pos_rtx,
					  GET_MODE (pos_rtx));

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (set_src_cost (temp1, pos_mode, optimize_this_for_speed_p)
	      < set_src_cost (temp, pos_mode, optimize_this_for_speed_p))
	    temp = temp1;
	}
      pos_rtx = temp;
    }

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new_rtx = gen_lowpart (mode, new_rtx);

  return new_rtx;
}
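
/* Illustrative example (an assumption, not from the sources): asked to
   extract LEN = 8 bits at POS = 3 from (reg:SI 100) for a read, the code
   above would typically build
       (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 3))
   when UNSIGNEDP, or a SIGN_EXTRACT otherwise, possibly converted to
   MODE via gen_lowpart.  */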

/* See if X (of mode MODE) contains an ASHIFT of COUNT or more bits that
   can be commuted with any other operations in X.  Return X without
   that shift if so.  */

static rtx
extract_left_shift (scalar_int_mode mode, rtx x, int count)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
	 either the value being shifted if the shift count is equal to
	 COUNT or a shift for the difference.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) >= count)
	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
				     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (mode, XEXP (x, 0), count)) != 0)
	return simplify_gen_unary (code, mode, tem, mode);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
	 make a new operation.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && (UINTVAL (XEXP (x, 1))
	      & (((HOST_WIDE_INT_1U << count)) - 1)) == 0
	  && (tem = extract_left_shift (mode, XEXP (x, 0), count)) != 0)
	{
	  HOST_WIDE_INT val = INTVAL (XEXP (x, 1)) >> count;
	  return simplify_gen_binary (code, mode, tem,
				      gen_int_mode (val, mode));
	}
      break;

    default:
      break;
    }

  return 0;
}
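
/* Sketch of the intended effect (example values are assumptions): with
   COUNT = 2, the expression (plus (ashift X 3) (const_int 12)) contains
   an ASHIFT of at least 2 bits that commutes with the PLUS, so the
   function returns (plus (ashift X 1) (const_int 3)); shifting that
   result left by 2 recreates the original value.  */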

/* Subroutine of make_compound_operation.  *X_PTR is the rtx at the current
   level of the expression and MODE is its mode.  IN_CODE is as for
   make_compound_operation.  *NEXT_CODE_PTR is the value of IN_CODE
   that should be used when recursing on operands of *X_PTR.

   There are two possible actions:

   - Return null.  This tells the caller to recurse on *X_PTR with IN_CODE
     equal to *NEXT_CODE_PTR, after which *X_PTR holds the final value.

   - Return a new rtx, which the caller returns directly.  */

static rtx
make_compound_operation_int (scalar_int_mode mode, rtx *x_ptr,
			     enum rtx_code in_code,
			     enum rtx_code *next_code_ptr)
{
  rtx x = *x_ptr;
  enum rtx_code next_code = *next_code_ptr;
  enum rtx_code code = GET_CODE (x);
  int mode_width = GET_MODE_PRECISION (mode);
  rtx rhs, lhs;
  rtx new_rtx = 0;
  int i;
  rtx tem;
  scalar_int_mode inner_mode;
  bool equality_comparison = false;

  if (in_code == EQ)
    {
      equality_comparison = true;
      in_code = COMPARE;
    }

  /* Process depending on the code of this operation.  If NEW_RTX is set
     nonzero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
      if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  HOST_WIDE_INT count = INTVAL (XEXP (x, 1));
	  HOST_WIDE_INT multval = HOST_WIDE_INT_1 << count;

	  new_rtx = make_compound_operation (XEXP (x, 0), next_code);
	  if (GET_CODE (new_rtx) == NEG)
	    {
	      new_rtx = XEXP (new_rtx, 0);
	      multval = -multval;
	    }
	  multval = trunc_int_for_mode (multval, mode);
	  new_rtx = gen_rtx_MULT (mode, new_rtx, gen_int_mode (multval, mode));
	}
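      /* Illustrative example (an assumption): inside an address,
	 (ashift (reg:SI 100) (const_int 2)) becomes
	 (mult (reg:SI 100) (const_int 4)), the canonical form for
	 address arithmetic.  */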
      break;

    case PLUS:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);
      lhs = make_compound_operation (lhs, next_code);
      rhs = make_compound_operation (rhs, next_code);
      if (GET_CODE (lhs) == MULT && GET_CODE (XEXP (lhs, 0)) == NEG)
	{
	  tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (lhs, 0), 0),
				     XEXP (lhs, 1));
	  new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
	}
      else if (GET_CODE (lhs) == MULT
	       && (CONST_INT_P (XEXP (lhs, 1)) && INTVAL (XEXP (lhs, 1)) < 0))
	{
	  tem = simplify_gen_binary (MULT, mode, XEXP (lhs, 0),
				     simplify_gen_unary (NEG, mode,
							 XEXP (lhs, 1),
							 mode));
	  new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
	}
      else
	{
	  SUBST (XEXP (x, 0), lhs);
	  SUBST (XEXP (x, 1), rhs);
	}
      maybe_swap_commutative_operands (x);
      return x;

    case MINUS:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);
      lhs = make_compound_operation (lhs, next_code);
      rhs = make_compound_operation (rhs, next_code);
      if (GET_CODE (rhs) == MULT && GET_CODE (XEXP (rhs, 0)) == NEG)
	{
	  tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (rhs, 0), 0),
				     XEXP (rhs, 1));
	  return simplify_gen_binary (PLUS, mode, tem, lhs);
	}
      else if (GET_CODE (rhs) == MULT
	       && (CONST_INT_P (XEXP (rhs, 1)) && INTVAL (XEXP (rhs, 1)) < 0))
	{
	  tem = simplify_gen_binary (MULT, mode, XEXP (rhs, 0),
				     simplify_gen_unary (NEG, mode,
							 XEXP (rhs, 1),
							 mode));
	  return simplify_gen_binary (PLUS, mode, tem, lhs);
	}
      else
	{
	  SUBST (XEXP (x, 0), lhs);
	  SUBST (XEXP (x, 1), rhs);
	  return x;
	}

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (!CONST_INT_P (XEXP (x, 1)))
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1),
				     i, 1, 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (XEXP (x, 0))),
					  &inner_mode)
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  rtx inner_x0 = SUBREG_REG (XEXP (x, 0));
	  new_rtx = make_compound_operation (XEXP (inner_x0, 0), next_code);
	  new_rtx = make_extraction (inner_mode, new_rtx, 0,
				     XEXP (inner_x0, 1),
				     i, 1, 0, in_code == COMPARE);

	  /* If we narrowed the mode when dropping the subreg, then we lose.  */
	  if (GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (mode))
	    new_rtx = NULL;

	  /* If that didn't give anything, see if the AND simplifies on
	     its own.  */
	  if (!new_rtx && i >= 0)
	    {
	      new_rtx = make_compound_operation (XEXP (x, 0), next_code);
	      new_rtx = make_extraction (mode, new_rtx, 0, NULL_RTX, i, 1,
					 0, in_code == COMPARE);
	    }
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
				    gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
						 XEXP (x, 1)),
				    gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
						 XEXP (x, 1)));
	  new_rtx = make_compound_operation (new_rtx, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && CONST_INT_P (XEXP (XEXP (x, 0), 1))
	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new_rtx = make_extraction (mode, new_rtx,
				     (GET_MODE_PRECISION (mode)
				      - INTVAL (XEXP (XEXP (x, 0), 1))),
				     NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && !have_insn_for (LSHIFTRT, mode)
	       && have_insn_for (ASHIFTRT, mode)
	       && CONST_INT_P (XEXP (XEXP (x, 0), 1))
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_ASHIFTRT (mode,
				     make_compound_operation (XEXP (XEXP (x,
									  0),
								    0),
							      next_code),
				     XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
	new_rtx = make_extraction (mode,
				   make_compound_operation (XEXP (x, 0),
							    next_code),
				   0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
	       && (equality_comparison || i < GET_MODE_PRECISION (mode) - 1))
	new_rtx = make_extraction (mode,
				   make_compound_operation (XEXP (x, 0),
							    next_code),
				   i, NULL_RTX, 1, 1, 0, 1);

      /* If one operand is a paradoxical subreg of a register or memory and
	 the constant (limited to the smaller mode) has only zero bits where
	 the subexpression has known zero bits, this can be expressed as
	 a zero_extend.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG)
	{
	  rtx sub;

	  sub = XEXP (XEXP (x, 0), 0);
	  machine_mode sub_mode = GET_MODE (sub);
	  int sub_width;
	  if ((REG_P (sub) || MEM_P (sub))
	      && GET_MODE_PRECISION (sub_mode).is_constant (&sub_width)
	      && sub_width < mode_width)
	    {
	      unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (sub_mode);
	      unsigned HOST_WIDE_INT mask;

	      /* The original AND constant with all the known zero bits set.  */
	      mask = UINTVAL (XEXP (x, 1)) | (~nonzero_bits (sub, sub_mode));
	      if ((mask & mode_mask) == mode_mask)
		{
		  new_rtx = make_compound_operation (sub, next_code);
		  new_rtx = make_extraction (mode, new_rtx, 0, 0, sub_width,
					     1, 0, in_code == COMPARE);
		}
	    }
	}

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
	  && ! have_insn_for (LSHIFTRT, mode)
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new_rtx = gen_rtx_ASHIFTRT (mode,
				      make_compound_operation (XEXP (x, 0),
							       next_code),
				      XEXP (x, 1));
	  break;
	}

      /* fall through */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (CONST_INT_P (rhs)
	  && GET_CODE (lhs) == ASHIFT
	  && CONST_INT_P (XEXP (lhs, 1))
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
	  && INTVAL (XEXP (lhs, 1)) >= 0
	  && INTVAL (rhs) < mode_width)
	{
	  new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
	  new_rtx = make_extraction (mode, new_rtx,
				     INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				     NULL_RTX, mode_width - INTVAL (rhs),
				     code == LSHIFTRT, 0, in_code == COMPARE);
	  break;
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (!OBJECT_P (lhs)
	  && ! (GET_CODE (lhs) == SUBREG
		&& (OBJECT_P (SUBREG_REG (lhs))))
	  && CONST_INT_P (rhs)
	  && INTVAL (rhs) >= 0
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (rhs) < mode_width
	  && (new_rtx = extract_left_shift (mode, lhs, INTVAL (rhs))) != 0)
	new_rtx = make_extraction (mode, make_compound_operation (new_rtx,
								  next_code),
				   0, NULL_RTX, mode_width - INTVAL (rhs),
				   code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */
      {
	rtx inner = SUBREG_REG (x), simplified;
	enum rtx_code subreg_code = in_code;

	/* If the SUBREG masks a logical right shift,
	   make an extraction.  */
	if (GET_CODE (inner) == LSHIFTRT
	    && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
	    && GET_MODE_SIZE (mode) < GET_MODE_SIZE (inner_mode)
	    && CONST_INT_P (XEXP (inner, 1))
	    && UINTVAL (XEXP (inner, 1)) < GET_MODE_PRECISION (inner_mode)
	    && subreg_lowpart_p (x))
	  {
	    new_rtx = make_compound_operation (XEXP (inner, 0), next_code);
	    int width = GET_MODE_PRECISION (inner_mode)
			- INTVAL (XEXP (inner, 1));
	    if (width > mode_width)
	      width = mode_width;
	    new_rtx = make_extraction (mode, new_rtx, 0, XEXP (inner, 1),
				       width, 1, 0, in_code == COMPARE);
	    break;
	  }

	/* If in_code is COMPARE, it isn't always safe to pass it through
	   to the recursive make_compound_operation call.  */
	if (subreg_code == COMPARE
	    && (!subreg_lowpart_p (x)
		|| GET_CODE (inner) == SUBREG
		/* (subreg:SI (and:DI (reg:DI) (const_int 0x800000000)) 0)
		   is (const_int 0), rather than
		   (subreg:SI (lshiftrt:DI (reg:DI) (const_int 35)) 0).
		   Similarly (subreg:QI (and:SI (reg:SI) (const_int 0x80)) 0)
		   for non-equality comparisons against 0 is not equivalent
		   to (subreg:QI (lshiftrt:SI (reg:SI) (const_int 7)) 0).  */
		|| (GET_CODE (inner) == AND
		    && CONST_INT_P (XEXP (inner, 1))
		    && partial_subreg_p (x)
		    && exact_log2 (UINTVAL (XEXP (inner, 1)))
		       >= GET_MODE_BITSIZE (mode) - 1)))
	  subreg_code = SET;

	tem = make_compound_operation (inner, subreg_code);

	simplified
	  = simplify_subreg (mode, tem, GET_MODE (inner), SUBREG_BYTE (x));
	if (simplified)
	  tem = simplified;

	if (GET_CODE (tem) != GET_CODE (inner)
	    && partial_subreg_p (x)
	    && subreg_lowpart_p (x))
	  {
	    rtx newer
	      = force_to_mode (tem, mode, HOST_WIDE_INT_M1U, 0);

	    /* If we have something other than a SUBREG, we might have
	       done an expansion, so rerun ourselves.  */
	    if (GET_CODE (newer) != SUBREG)
	      newer = make_compound_operation (newer, in_code);

	    /* force_to_mode can expand compounds.  If it just re-expanded
	       the compound, use gen_lowpart to convert to the desired
	       mode.  */
	    if (rtx_equal_p (newer, x)
		/* Likewise if it re-expanded the compound only partially.
		   This happens for SUBREG of ZERO_EXTRACT if they extract
		   the same number of bits.  */
		|| (GET_CODE (newer) == SUBREG
		    && (GET_CODE (SUBREG_REG (newer)) == LSHIFTRT
			|| GET_CODE (SUBREG_REG (newer)) == ASHIFTRT)
		    && GET_CODE (inner) == AND
		    && rtx_equal_p (SUBREG_REG (newer), XEXP (inner, 0))))
	      return gen_lowpart (GET_MODE (x), tem);

	    return newer;
	  }

	if (simplified)
	  return tem;
      }
      break;

    default:
      break;
    }

  if (new_rtx)
    *x_ptr = gen_lowpart (mode, new_rtx);
  *next_code_ptr = next_code;
  return NULL_RTX;
}

/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the VAX that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address it is MEM.  When processing the arguments of
   a comparison or a COMPARE against zero, it is COMPARE, or EQ if more
   precisely it is an equality comparison against zero.  */

rtx
make_compound_operation (rtx x, enum rtx_code in_code)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  enum rtx_code next_code;
  rtx new_rtx, tem;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM ? MEM
	       : ((code == COMPARE || COMPARISON_P (x))
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE || in_code == EQ ? SET : in_code);

  scalar_int_mode mode;
  if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
    {
      rtx new_rtx = make_compound_operation_int (mode, &x, in_code,
						 &next_code);
      if (new_rtx)
	return new_rtx;
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  We need to
     handle ZERO_EXTEND specially so that we don't lose track of the
     inner mode.  */
  if (code == ZERO_EXTEND)
    {
      new_rtx = make_compound_operation (XEXP (x, 0), next_code);
      tem = simplify_const_unary_operation (ZERO_EXTEND, GET_MODE (x),
					    new_rtx, GET_MODE (XEXP (x, 0)));
      if (tem)
	return tem;
      SUBST (XEXP (x, 0), new_rtx);
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new_rtx = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new_rtx);
      }
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	{
	  new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
	  SUBST (XVECEXP (x, i, j), new_rtx);
	}

  maybe_swap_commutative_operands (x);
  return x;
}
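
/* Illustrative examples of the rewrites above (the exact RTL is an
   assumption, not taken from the sources):

     (and (lshiftrt (reg:SI 100) (const_int 3)) (const_int 7))
       --> (zero_extract:SI (reg:SI 100) (const_int 3) (const_int 3))

     (mem (plus (ashift (reg:SI 100) (const_int 2)) (reg:SI 101)))
       --> (mem (plus (mult (reg:SI 100) (const_int 4)) (reg:SI 101)))

   expand_compound_operation performs the inverse rewrite.  */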

/* Given M see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */

static int
get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = m ? ctz_hwi (m) : -1;
  int len = 0;

  if (pos >= 0)
    /* Now shift off the low-order zero bits and see if we have a
       power of two minus 1.  */
    len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    pos = -1;

  *plen = len;
  return pos;
}
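
/* A minimal standalone sketch of the same decomposition (an illustration
   only; mask_to_field is not a GCC function and it assumes a 64-bit
   HOST_WIDE_INT).  For example, m = 0x78 (binary 0111 1000) yields
   pos = 3 and len = 4, while m = ~0ULL fails because the field would
   cover the entire word.  */
#if 0
static int
mask_to_field (unsigned long long m, int *plen)
{
  if (m == 0)
    {
      *plen = 0;
      return -1;
    }
  int pos = __builtin_ctzll (m);	/* Skip the low-order zero bits.  */
  unsigned long long f = (m >> pos) + 1;
  /* F must be a nonzero power of two, i.e. M >> POS was a contiguous
     run of ones that does not fill the whole word.  */
  if (f == 0 || (f & (f - 1)) != 0)
    {
      *plen = 0;
      return -1;
    }
  *plen = __builtin_ctzll (f);		/* The field length.  */
  return pos;
}
#endif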

/* If X refers to a register that equals REG in value, replace these
   references with REG.  */
static rtx
canon_reg_for_combine (rtx x, rtx reg)
{
  rtx op0, op1, op2;
  const char *fmt;
  int i;
  bool copied;

  enum rtx_code code = GET_CODE (x);
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      if (op0 != XEXP (x, 0))
	return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
				   GET_MODE (reg));
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
					GET_MODE (op0), op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      op2 = canon_reg_for_combine (XEXP (x, 2), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
	return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
				     GET_MODE (op0), op0, op1, op2);
      /* FALLTHRU */

    case RTX_OBJ:
      if (REG_P (x))
	{
	  if (rtx_equal_p (get_last_value (reg), x)
	      || rtx_equal_p (reg, get_last_value (x)))
	    return reg;
	  else
	    break;
	}

      /* fall through */

    default:
      fmt = GET_RTX_FORMAT (code);
      copied = false;
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (fmt[i] == 'e')
	  {
	    rtx op = canon_reg_for_combine (XEXP (x, i), reg);
	    if (op != XEXP (x, i))
	      {
		if (!copied)
		  {
		    copied = true;
		    x = copy_rtx (x);
		  }
		XEXP (x, i) = op;
	      }
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      {
		rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
		if (op != XVECEXP (x, i, j))
		  {
		    if (!copied)
		      {
			copied = true;
			x = copy_rtx (x);
		      }
		    XVECEXP (x, i, j) = op;
		  }
	      }
	  }

      break;
    }

  return x;
}

/* Return X converted to MODE.  If the value is already truncated to
   MODE we can just return a subreg even though in the general case we
   would need an explicit truncation.  */

static rtx
gen_lowpart_or_truncate (machine_mode mode, rtx x)
{
  if (!CONST_INT_P (x)
      && partial_subreg_p (mode, GET_MODE (x))
      && !TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x))
      && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
    {
      /* Bit-cast X into an integer mode.  */
      if (!SCALAR_INT_MODE_P (GET_MODE (x)))
	x = gen_lowpart (int_mode_for_mode (GET_MODE (x)).require (), x);
      x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode).require (),
			      x, GET_MODE (x));
    }

  return gen_lowpart (mode, x);
}
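
/* Illustrative example (an assumption): if (reg:DI 100) is not known to
   be already truncated and DImode-to-SImode truncation is not a no-op
   on the target, gen_lowpart_or_truncate (SImode, (reg:DI 100)) yields
   (truncate:SI (reg:DI 100)); otherwise it yields the cheaper
   (subreg:SI (reg:DI 100) 0), with the byte offset depending on
   endianness.  */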

/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */

static rtx
force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
	       int just_select)
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  machine_mode op_mode;
  unsigned HOST_WIDE_INT nonzero;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
	      && have_insn_for (code, mode))
	     ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && partial_subreg_p (mode, GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
    x = const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (CONST_INT_P (x))
    {
      if (SCALAR_INT_MODE_P (mode))
	return gen_int_mode (INTVAL (x) & mask, mode);
      else
	{
	  x = GEN_INT (INTVAL (x) & mask);
	  return gen_lowpart_common (mode, x);
	}
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (paradoxical_subreg_p (mode, GET_MODE (x))
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart (mode, x);

  /* We can ignore the effect of a SUBREG if it narrows the mode or
     if the constant masks to zero all the bits the mode doesn't have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (partial_subreg_p (x)
	  || (mask
	      & GET_MODE_MASK (GET_MODE (x))
	      & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))) == 0))
    return force_to_mode (SUBREG_REG (x), mode, mask, next_select);

  scalar_int_mode int_mode, xmode;
  if (is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (GET_MODE (x), &xmode))
    /* OP_MODE is either MODE or XMODE, so it must be a scalar
       integer too.  */
    return force_int_to_mode (x, int_mode, xmode,
			      as_a <scalar_int_mode> (op_mode),
			      mask, just_select);

  return gen_lowpart_or_truncate (mode, x);
}
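
/* Illustrative example (an assumption): with MASK = 0xff,
   force_to_mode ((and (reg:SI 100) (const_int 0xff)), SImode, 0xff, 0)
   can return (reg:SI 100) directly, because the AND only clears bits
   the caller has promised never to look at.  */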

/* Subroutine of force_to_mode that handles cases in which both X and
   the result are scalar integers.  MODE is the mode of the result,
   XMODE is the mode of X, and OP_MODE says which of MODE or XMODE
   is preferred for simplified versions of X.  The other arguments
   are as for force_to_mode.  */

static rtx
force_int_to_mode (rtx x, scalar_int_mode mode, scalar_int_mode xmode,
		   scalar_int_mode op_mode, unsigned HOST_WIDE_INT mask,
		   int just_select)
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  unsigned HOST_WIDE_INT fuller_mask;
  rtx op0, op1, temp;

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (mask & (HOST_WIDE_INT_1U << (HOST_BITS_PER_WIDE_INT - 1)))
    fuller_mask = HOST_WIDE_INT_M1U;
  else
    fuller_mask = ((HOST_WIDE_INT_1U << (floor_log2 (mask) + 1))
		   - 1);
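
  /* Example (an illustration, not from the sources): if MASK is 0x0c,
     floor_log2 (mask) is 3 and FULLER_MASK becomes 0x0f, since carries
     out of bits 0 and 1 can propagate into the bits MASK selects.  */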

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
	 generating something that won't match.  */
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
	return force_to_mode (x, mode, mask, next_select);
      break;

    case TRUNCATE:
      /* Similarly for a truncate.  */
      return force_to_mode (XEXP (x, 0), mode, mask, next_select);

    case AND:
      /* If this is an AND with a constant, convert it into an AND
	 whose constant is the AND of that constant with MASK.  If it
	 remains an AND of MASK, delete it since it is redundant.  */

      if (CONST_INT_P (XEXP (x, 1)))
	{
	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
				      mask & INTVAL (XEXP (x, 1)));
	  xmode = op_mode;

	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and it is MASK, we don't
	     need it.  */

	  if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
	      && (INTVAL (XEXP (x, 1)) & GET_MODE_MASK (xmode)) == mask)
	    x = XEXP (x, 0);

	  /* If it remains an AND, try making another AND with the bits
	     in the mode mask that aren't in MASK turned on.  If the
	     constant in the AND is wide enough, this might make a
	     cheaper constant.  */

	  if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
	      && GET_MODE_MASK (xmode) != mask
	      && HWI_COMPUTABLE_MODE_P (xmode))
	    {
	      unsigned HOST_WIDE_INT cval
		= UINTVAL (XEXP (x, 1)) | (GET_MODE_MASK (xmode) & ~mask);
	      rtx y;

	      y = simplify_gen_binary (AND, xmode, XEXP (x, 0),
				       gen_int_mode (cval, xmode));
	      if (set_src_cost (y, xmode, optimize_this_for_speed_p)
		  < set_src_cost (x, xmode, optimize_this_for_speed_p))
		x = y;
	    }

	  break;
	}

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
	 low-order bits (as in an alignment operation) and FOO is already
	 aligned to that boundary, mask C1 to that boundary as well.
	 This may eliminate that PLUS and, later, the AND.  */

      {
	unsigned int width = GET_MODE_PRECISION (mode);
	unsigned HOST_WIDE_INT smask = mask;

	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
	   number, sign extend it.  */

	if (width < HOST_BITS_PER_WIDE_INT
	    && (smask & (HOST_WIDE_INT_1U << (width - 1))) != 0)
	  smask |= HOST_WIDE_INT_M1U << width;

	if (CONST_INT_P (XEXP (x, 1))
	    && pow2p_hwi (- smask)
	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
	  return force_to_mode (plus_constant (xmode, XEXP (x, 0),
					       (INTVAL (XEXP (x, 1)) & smask)),
				mode, smask, next_select);
      }

      /* fall through */

    case MULT:
      /* Substituting into the operands of a widening MULT is not likely to
	 create RTL matching a machine insn.  */
      if (code == MULT
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
	  && (GET_CODE (XEXP (x, 1)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
	  && REG_P (XEXP (XEXP (x, 0), 0))
	  && REG_P (XEXP (XEXP (x, 1), 0)))
	return gen_lowpart_or_truncate (mode, x);

      /* For PLUS, MINUS and MULT, we need any bits less significant than the
	 most significant bit in MASK since carries from those bits will
	 affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
	 in the mask, then we may replace with (neg Y).  */
      if (CONST_INT_P (XEXP (x, 0))
	  && least_bit_hwi (UINTVAL (XEXP (x, 0))) > mask)
	{
	  x = simplify_gen_unary (NEG, xmode, XEXP (x, 1), xmode);
	  return force_to_mode (x, mode, mask, next_select);
	}

      /* Similarly, if C contains every bit in the fuller_mask, then we may
	 replace with (not Y).  */
      if (CONST_INT_P (XEXP (x, 0))
	  && ((UINTVAL (XEXP (x, 0)) | fuller_mask) == UINTVAL (XEXP (x, 0))))
	{
	  x = simplify_gen_unary (NOT, xmode, XEXP (x, 1), xmode);
	  return force_to_mode (x, mode, mask, next_select);
	}

      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	 operation which may be a bitfield extraction.  Ensure that the
	 constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	  && CONST_INT_P (XEXP (x, 1))
	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
	       + floor_log2 (INTVAL (XEXP (x, 1))))
	      < GET_MODE_PRECISION (xmode))
	  && (UINTVAL (XEXP (x, 1))
	      & ~nonzero_bits (XEXP (x, 0), xmode)) == 0)
	{
	  temp = gen_int_mode ((INTVAL (XEXP (x, 1)) & mask)
			       << INTVAL (XEXP (XEXP (x, 0), 1)),
			       xmode);
	  temp = simplify_gen_binary (GET_CODE (x), xmode,
				      XEXP (XEXP (x, 0), 0), temp);
	  x = simplify_gen_binary (LSHIFTRT, xmode, temp,
				   XEXP (XEXP (x, 0), 1));
	  return force_to_mode (x, mode, mask, next_select);
	}
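
      /* Illustrative example (an assumption, and only valid when the
	 constant's bits are within the shift's nonzero bits):
	 (ior (lshiftrt (reg:SI 100) (const_int 2)) (const_int 1))
	 is rewritten as
	 (lshiftrt (ior (reg:SI 100) (const_int 4)) (const_int 2)),
	 exposing a possible bitfield extraction.  */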

    binop:
      /* For most binary operations, just propagate into the operation and
	 change the mode if we have an operation of that mode.  */

      op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
      op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);

      /* If we ended up truncating both operands, truncate the result of the
	 operation instead.  */
      if (GET_CODE (op0) == TRUNCATE
	  && GET_CODE (op1) == TRUNCATE)
	{
	  op0 = XEXP (op0, 0);
	  op1 = XEXP (op1, 0);
	}

      op0 = gen_lowpart_or_truncate (op_mode, op0);
      op1 = gen_lowpart_or_truncate (op_mode, op1);

      if (op_mode != xmode || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	{
	  x = simplify_gen_binary (code, op_mode, op0, op1);
	  xmode = op_mode;
	}
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
	 However, we cannot do anything with shifts where we cannot
	 guarantee that the counts are smaller than the size of the mode
	 because such a count will have a different meaning in a
	 wider mode.  */

      if (! (CONST_INT_P (XEXP (x, 1))
	     && INTVAL (XEXP (x, 1)) >= 0
	     && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (mode))
	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
		    < (unsigned HOST_WIDE_INT) GET_MODE_PRECISION (mode))))
	break;

      /* If the shift count is a constant and we can do arithmetic in
	 the mode of the shift, refine which bits we need.  Otherwise, use the
	 conservative form of the mask.  */
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (op_mode)
	  && HWI_COMPUTABLE_MODE_P (op_mode))
	mask >>= INTVAL (XEXP (x, 1));
      else
	mask = fuller_mask;

      op0 = gen_lowpart_or_truncate (op_mode,
				     force_to_mode (XEXP (x, 0), mode,
						    mask, next_select));

      if (op_mode != xmode || op0 != XEXP (x, 0))
	{
	  x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
	  xmode = op_mode;
	}
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant
	 that is valid for the host, and we can do arithmetic in
	 OP_MODE.  */
895738fd1498Szrj 
895838fd1498Szrj       if (CONST_INT_P (XEXP (x, 1))
895938fd1498Szrj 	  && INTVAL (XEXP (x, 1)) >= 0
896038fd1498Szrj 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
896138fd1498Szrj 	  && HWI_COMPUTABLE_MODE_P (op_mode))
896238fd1498Szrj 	{
896338fd1498Szrj 	  rtx inner = XEXP (x, 0);
896438fd1498Szrj 	  unsigned HOST_WIDE_INT inner_mask;
896538fd1498Szrj 
896638fd1498Szrj 	  /* Select the mask of the bits we need for the shift operand.  */
896738fd1498Szrj 	  inner_mask = mask << INTVAL (XEXP (x, 1));
896838fd1498Szrj 
896938fd1498Szrj 	  /* We can only change the mode of the shift if we can do arithmetic
897038fd1498Szrj 	     in the mode of the shift and INNER_MASK is no wider than the
897138fd1498Szrj 	     width of X's mode.  */
897238fd1498Szrj 	  if ((inner_mask & ~GET_MODE_MASK (xmode)) != 0)
897338fd1498Szrj 	    op_mode = xmode;
897438fd1498Szrj 
897538fd1498Szrj 	  inner = force_to_mode (inner, op_mode, inner_mask, next_select);
897638fd1498Szrj 
897738fd1498Szrj 	  if (xmode != op_mode || inner != XEXP (x, 0))
897838fd1498Szrj 	    {
897938fd1498Szrj 	      x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
898038fd1498Szrj 	      xmode = op_mode;
898138fd1498Szrj 	    }
898238fd1498Szrj 	}
898338fd1498Szrj 
898438fd1498Szrj       /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
898538fd1498Szrj 	 shift and AND produces only copies of the sign bit (C2 is one less
898638fd1498Szrj 	 than a power of two), we can do this with just a shift.  */
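      /* For example, in SImode with MASK == 15, if FOO is known to have
	 24 sign bit copies (say, a sign-extended QImode value), then
	 (and (lshiftrt FOO 26) 15) picks up four copies of the sign bit
	 and so is 0 or 15; (lshiftrt FOO 28) computes the same value
	 without needing the AND.  */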
898738fd1498Szrj 
898838fd1498Szrj       if (GET_CODE (x) == LSHIFTRT
898938fd1498Szrj 	  && CONST_INT_P (XEXP (x, 1))
899038fd1498Szrj 	  /* The shift puts one of the sign bit copies in the least significant
899138fd1498Szrj 	     bit.  */
899238fd1498Szrj 	  && ((INTVAL (XEXP (x, 1))
899338fd1498Szrj 	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
899438fd1498Szrj 	      >= GET_MODE_PRECISION (xmode))
899538fd1498Szrj 	  && pow2p_hwi (mask + 1)
899638fd1498Szrj 	  /* Number of bits left after the shift must be more than the mask
899738fd1498Szrj 	     needs.  */
899838fd1498Szrj 	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
899938fd1498Szrj 	      <= GET_MODE_PRECISION (xmode))
900038fd1498Szrj 	  /* Must be more sign bit copies than the mask needs.  */
900138fd1498Szrj 	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
900238fd1498Szrj 	      >= exact_log2 (mask + 1)))
900338fd1498Szrj 	{
900438fd1498Szrj 	  int nbits = GET_MODE_PRECISION (xmode) - exact_log2 (mask + 1);
900538fd1498Szrj 	  x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0),
900638fd1498Szrj 				   gen_int_shift_amount (xmode, nbits));
900738fd1498Szrj 	}
900838fd1498Szrj       goto shiftrt;
900938fd1498Szrj 
901038fd1498Szrj     case ASHIFTRT:
901138fd1498Szrj       /* If we are just looking for the sign bit, we don't need this shift at
901238fd1498Szrj 	 all, even if it has a variable count.  */
901338fd1498Szrj       if (val_signbit_p (xmode, mask))
901438fd1498Szrj 	return force_to_mode (XEXP (x, 0), mode, mask, next_select);
901538fd1498Szrj 
901638fd1498Szrj       /* If this is a shift by a constant, get a mask that contains those bits
901738fd1498Szrj 	 that are not copies of the sign bit.  We then have two cases:  If
901838fd1498Szrj 	 MASK only includes those bits, this can be a logical shift, which may
901938fd1498Szrj 	 allow simplifications.  If MASK is a single-bit field not within
902038fd1498Szrj 	 those bits, we are requesting a copy of the sign bit and hence can
902138fd1498Szrj 	 shift the sign bit to the appropriate location.  */
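      /* For example, in SImode, (and (ashiftrt FOO 24) 0xff) needs only
	 bits that are not copies of the sign bit, so a logical shift
	 computes the same value.  With MASK == 0x100 instead, only bit 8
	 is wanted and it is a copy of the sign bit, so (lshiftrt FOO 23)
	 produces it directly.  */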
902238fd1498Szrj 
902338fd1498Szrj       if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
902438fd1498Szrj 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
902538fd1498Szrj 	{
902638fd1498Szrj 	  unsigned HOST_WIDE_INT nonzero;
902738fd1498Szrj 	  int i;
902838fd1498Szrj 
902938fd1498Szrj 	  /* If the considered data is wider than HOST_WIDE_INT, we can't
903038fd1498Szrj 	     represent a mask for all its bits in a single scalar.
903138fd1498Szrj 	     But we only care about the lower bits, so calculate these.  */
903238fd1498Szrj 
903338fd1498Szrj 	  if (GET_MODE_PRECISION (xmode) > HOST_BITS_PER_WIDE_INT)
903438fd1498Szrj 	    {
903538fd1498Szrj 	      nonzero = HOST_WIDE_INT_M1U;
903638fd1498Szrj 
903738fd1498Szrj 	      /* GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
903838fd1498Szrj 		 is the number of bits a full-width mask would have set.
903938fd1498Szrj 		 We need only shift if these are fewer than nonzero can
904038fd1498Szrj 		 hold.  If not, we must keep all bits set in nonzero.  */
904138fd1498Szrj 
904238fd1498Szrj 	      if (GET_MODE_PRECISION (xmode) - INTVAL (XEXP (x, 1))
904338fd1498Szrj 		  < HOST_BITS_PER_WIDE_INT)
904438fd1498Szrj 		nonzero >>= INTVAL (XEXP (x, 1))
904538fd1498Szrj 			    + HOST_BITS_PER_WIDE_INT
904638fd1498Szrj 			    - GET_MODE_PRECISION (xmode);
904738fd1498Szrj 	    }
904838fd1498Szrj 	  else
904938fd1498Szrj 	    {
905038fd1498Szrj 	      nonzero = GET_MODE_MASK (xmode);
905138fd1498Szrj 	      nonzero >>= INTVAL (XEXP (x, 1));
905238fd1498Szrj 	    }
905338fd1498Szrj 
905438fd1498Szrj 	  if ((mask & ~nonzero) == 0)
905538fd1498Szrj 	    {
905638fd1498Szrj 	      x = simplify_shift_const (NULL_RTX, LSHIFTRT, xmode,
905738fd1498Szrj 					XEXP (x, 0), INTVAL (XEXP (x, 1)));
905838fd1498Szrj 	      if (GET_CODE (x) != ASHIFTRT)
905938fd1498Szrj 		return force_to_mode (x, mode, mask, next_select);
906038fd1498Szrj 	    }
906138fd1498Szrj 
906238fd1498Szrj 	  else if ((i = exact_log2 (mask)) >= 0)
906338fd1498Szrj 	    {
906438fd1498Szrj 	      x = simplify_shift_const
906538fd1498Szrj 		  (NULL_RTX, LSHIFTRT, xmode, XEXP (x, 0),
906638fd1498Szrj 		   GET_MODE_PRECISION (xmode) - 1 - i);
906738fd1498Szrj 
906838fd1498Szrj 	      if (GET_CODE (x) != ASHIFTRT)
906938fd1498Szrj 		return force_to_mode (x, mode, mask, next_select);
907038fd1498Szrj 	    }
907138fd1498Szrj 	}
907238fd1498Szrj 
907338fd1498Szrj       /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
907438fd1498Szrj 	 even if the shift count isn't a constant.  */
907538fd1498Szrj       if (mask == 1)
907638fd1498Szrj 	x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0), XEXP (x, 1));
907738fd1498Szrj 
907838fd1498Szrj     shiftrt:
907938fd1498Szrj 
908038fd1498Szrj       /* If this is a zero- or sign-extension operation that just affects bits
908138fd1498Szrj 	 we don't care about, remove it.  Be sure the call above returned
908238fd1498Szrj 	 something that is still a shift.  */
908338fd1498Szrj 
908438fd1498Szrj       if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
908538fd1498Szrj 	  && CONST_INT_P (XEXP (x, 1))
908638fd1498Szrj 	  && INTVAL (XEXP (x, 1)) >= 0
908738fd1498Szrj 	  && (INTVAL (XEXP (x, 1))
908838fd1498Szrj 	      <= GET_MODE_PRECISION (xmode) - (floor_log2 (mask) + 1))
908938fd1498Szrj 	  && GET_CODE (XEXP (x, 0)) == ASHIFT
909038fd1498Szrj 	  && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
909138fd1498Szrj 	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
909238fd1498Szrj 			      next_select);
909338fd1498Szrj 
909438fd1498Szrj       break;
909538fd1498Szrj 
909638fd1498Szrj     case ROTATE:
909738fd1498Szrj     case ROTATERT:
909838fd1498Szrj       /* If the shift count is constant and we can do computations
909938fd1498Szrj 	 in the mode of X, compute where the bits we care about are.
910038fd1498Szrj 	 Otherwise, we can't do anything.  Don't change the mode of
910138fd1498Szrj 	 the shift or propagate MODE into the shift, though.  */
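      /* For example, for (rotate X 8) in SImode with MASK == 0xff00,
	 the wanted bits come from bits 0..7 of X, so X is forced to
	 the rotated-back mask (rotatert 0xff00 8) == 0xff.  */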
910238fd1498Szrj       if (CONST_INT_P (XEXP (x, 1))
910338fd1498Szrj 	  && INTVAL (XEXP (x, 1)) >= 0)
910438fd1498Szrj 	{
910538fd1498Szrj 	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
910638fd1498Szrj 					    xmode, gen_int_mode (mask, xmode),
910738fd1498Szrj 					    XEXP (x, 1));
910838fd1498Szrj 	  if (temp && CONST_INT_P (temp))
910938fd1498Szrj 	    x = simplify_gen_binary (code, xmode,
911038fd1498Szrj 				     force_to_mode (XEXP (x, 0), xmode,
911138fd1498Szrj 						    INTVAL (temp), next_select),
911238fd1498Szrj 				     XEXP (x, 1));
911338fd1498Szrj 	}
911438fd1498Szrj       break;
911538fd1498Szrj 
911638fd1498Szrj     case NEG:
911738fd1498Szrj       /* If we just want the low-order bit, the NEG isn't needed since it
911838fd1498Szrj 	 won't change the low-order bit.  */
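      /* For example, (and (neg X) 1) is equivalent to (and X 1),
	 since -X == X (mod 2).  */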
911938fd1498Szrj       if (mask == 1)
912038fd1498Szrj 	return force_to_mode (XEXP (x, 0), mode, mask, just_select);
912138fd1498Szrj 
912238fd1498Szrj       /* We need any bits less significant than the most significant bit in
912338fd1498Szrj 	 MASK since carries from those bits will affect the bits we are
912438fd1498Szrj 	 interested in.  */
912538fd1498Szrj       mask = fuller_mask;
912638fd1498Szrj       goto unop;
912738fd1498Szrj 
912838fd1498Szrj     case NOT:
912938fd1498Szrj       /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
913038fd1498Szrj 	 same as the XOR case above.  Ensure that the constant we form is not
913138fd1498Szrj 	 wider than the mode of X.  */
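      /* For example, with MASK == 1, (not (lshiftrt FOO 4)) needs only
	 the complement of bit 4 of FOO, and (lshiftrt (xor FOO 16) 4)
	 computes it: XORing with 16 == 1 << 4 flips exactly that bit
	 before shifting it down.  */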
913238fd1498Szrj 
913338fd1498Szrj       if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
913438fd1498Szrj 	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
913538fd1498Szrj 	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
913638fd1498Szrj 	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
913738fd1498Szrj 	      < GET_MODE_PRECISION (xmode))
913838fd1498Szrj 	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
913938fd1498Szrj 	{
914038fd1498Szrj 	  temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), xmode);
914138fd1498Szrj 	  temp = simplify_gen_binary (XOR, xmode, XEXP (XEXP (x, 0), 0), temp);
914238fd1498Szrj 	  x = simplify_gen_binary (LSHIFTRT, xmode,
914338fd1498Szrj 				   temp, XEXP (XEXP (x, 0), 1));
914438fd1498Szrj 
914538fd1498Szrj 	  return force_to_mode (x, mode, mask, next_select);
914638fd1498Szrj 	}
914738fd1498Szrj 
914838fd1498Szrj       /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
914938fd1498Szrj 	 use the full mask inside the NOT.  */
915038fd1498Szrj       mask = fuller_mask;
915138fd1498Szrj 
915238fd1498Szrj     unop:
915338fd1498Szrj       op0 = gen_lowpart_or_truncate (op_mode,
915438fd1498Szrj 				     force_to_mode (XEXP (x, 0), mode, mask,
915538fd1498Szrj 						    next_select));
915638fd1498Szrj       if (op_mode != xmode || op0 != XEXP (x, 0))
915738fd1498Szrj 	{
915838fd1498Szrj 	  x = simplify_gen_unary (code, op_mode, op0, op_mode);
915938fd1498Szrj 	  xmode = op_mode;
916038fd1498Szrj 	}
916138fd1498Szrj       break;
916238fd1498Szrj 
916338fd1498Szrj     case NE:
916438fd1498Szrj       /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
916538fd1498Szrj 	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
916638fd1498Szrj 	 which is equal to STORE_FLAG_VALUE.  */
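      /* For example, with STORE_FLAG_VALUE == 1, (and (ne FOO 0) 1)
	 is just FOO when bit 0 is the only bit of FOO that might be
	 nonzero.  */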
916738fd1498Szrj       if ((mask & ~STORE_FLAG_VALUE) == 0
916838fd1498Szrj 	  && XEXP (x, 1) == const0_rtx
916938fd1498Szrj 	  && GET_MODE (XEXP (x, 0)) == mode
917038fd1498Szrj 	  && pow2p_hwi (nonzero_bits (XEXP (x, 0), mode))
917138fd1498Szrj 	  && (nonzero_bits (XEXP (x, 0), mode)
917238fd1498Szrj 	      == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
917338fd1498Szrj 	return force_to_mode (XEXP (x, 0), mode, mask, next_select);
917438fd1498Szrj 
917538fd1498Szrj       break;
917638fd1498Szrj 
917738fd1498Szrj     case IF_THEN_ELSE:
917838fd1498Szrj       /* We have no way of knowing if the IF_THEN_ELSE can itself be
917938fd1498Szrj 	 written in a narrower mode.  We play it safe and do not do so.  */
918038fd1498Szrj 
918138fd1498Szrj       op0 = gen_lowpart_or_truncate (xmode,
918238fd1498Szrj 				     force_to_mode (XEXP (x, 1), mode,
918338fd1498Szrj 						    mask, next_select));
918438fd1498Szrj       op1 = gen_lowpart_or_truncate (xmode,
918538fd1498Szrj 				     force_to_mode (XEXP (x, 2), mode,
918638fd1498Szrj 						    mask, next_select));
918738fd1498Szrj       if (op0 != XEXP (x, 1) || op1 != XEXP (x, 2))
918838fd1498Szrj 	x = simplify_gen_ternary (IF_THEN_ELSE, xmode,
918938fd1498Szrj 				  GET_MODE (XEXP (x, 0)), XEXP (x, 0),
919038fd1498Szrj 				  op0, op1);
919138fd1498Szrj       break;
919238fd1498Szrj 
919338fd1498Szrj     default:
919438fd1498Szrj       break;
919538fd1498Szrj     }
919638fd1498Szrj 
919738fd1498Szrj   /* Ensure we return a value of the proper mode.  */
919838fd1498Szrj   return gen_lowpart_or_truncate (mode, x);
919938fd1498Szrj }
920038fd1498Szrj 
920138fd1498Szrj /* Return nonzero if X is an expression that has one of two values depending on
920238fd1498Szrj    whether some other value is zero or nonzero.  In that case, we return the
920338fd1498Szrj    value that is being tested, *PTRUE is set to the value if the rtx being
920438fd1498Szrj    returned has a nonzero value, and *PFALSE is set to the other alternative.
920538fd1498Szrj 
920638fd1498Szrj    If we return zero, we set *PTRUE and *PFALSE to X.  */
920738fd1498Szrj 
920838fd1498Szrj static rtx
920938fd1498Szrj if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
921038fd1498Szrj {
921138fd1498Szrj   machine_mode mode = GET_MODE (x);
921238fd1498Szrj   enum rtx_code code = GET_CODE (x);
921338fd1498Szrj   rtx cond0, cond1, true0, true1, false0, false1;
921438fd1498Szrj   unsigned HOST_WIDE_INT nz;
921538fd1498Szrj   scalar_int_mode int_mode;
921638fd1498Szrj 
921738fd1498Szrj   /* If we are comparing a value against zero, we are done.  */
921838fd1498Szrj   if ((code == NE || code == EQ)
921938fd1498Szrj       && XEXP (x, 1) == const0_rtx)
922038fd1498Szrj     {
922138fd1498Szrj       *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
922238fd1498Szrj       *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
922338fd1498Szrj       return XEXP (x, 0);
922438fd1498Szrj     }
922538fd1498Szrj 
922638fd1498Szrj   /* If this is a unary operation whose operand has one of two values, apply
922738fd1498Szrj      our opcode to compute those values.  */
922838fd1498Szrj   else if (UNARY_P (x)
922938fd1498Szrj 	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
923038fd1498Szrj     {
923138fd1498Szrj       *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
923238fd1498Szrj       *pfalse = simplify_gen_unary (code, mode, false0,
923338fd1498Szrj 				    GET_MODE (XEXP (x, 0)));
923438fd1498Szrj       return cond0;
923538fd1498Szrj     }
923638fd1498Szrj 
923738fd1498Szrj   /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
923838fd1498Szrj      make can't possibly match and would suppress other optimizations.  */
923938fd1498Szrj   else if (code == COMPARE)
924038fd1498Szrj     ;
924138fd1498Szrj 
924238fd1498Szrj   /* If this is a binary operation, see if either side has only one of two
924338fd1498Szrj      values.  If either one does or if both do and they are conditional on
924438fd1498Szrj      the same value, compute the new true and false values.  */
924538fd1498Szrj   else if (BINARY_P (x))
924638fd1498Szrj     {
924738fd1498Szrj       rtx op0 = XEXP (x, 0);
924838fd1498Szrj       rtx op1 = XEXP (x, 1);
924938fd1498Szrj       cond0 = if_then_else_cond (op0, &true0, &false0);
925038fd1498Szrj       cond1 = if_then_else_cond (op1, &true1, &false1);
925138fd1498Szrj 
925238fd1498Szrj       if ((cond0 != 0 && cond1 != 0 && !rtx_equal_p (cond0, cond1))
925338fd1498Szrj 	  && (REG_P (op0) || REG_P (op1)))
925438fd1498Szrj 	{
925538fd1498Szrj 	  /* Try to enable a simplification by undoing work done by
925638fd1498Szrj 	     if_then_else_cond if it converted a REG into something more
925738fd1498Szrj 	     complex.  */
925838fd1498Szrj 	  if (REG_P (op0))
925938fd1498Szrj 	    {
926038fd1498Szrj 	      cond0 = 0;
926138fd1498Szrj 	      true0 = false0 = op0;
926238fd1498Szrj 	    }
926338fd1498Szrj 	  else
926438fd1498Szrj 	    {
926538fd1498Szrj 	      cond1 = 0;
926638fd1498Szrj 	      true1 = false1 = op1;
926738fd1498Szrj 	    }
926838fd1498Szrj 	}
926938fd1498Szrj 
927038fd1498Szrj       if ((cond0 != 0 || cond1 != 0)
927138fd1498Szrj 	  && ! (cond0 != 0 && cond1 != 0 && !rtx_equal_p (cond0, cond1)))
927238fd1498Szrj 	{
927338fd1498Szrj 	  /* If if_then_else_cond returned zero, then true/false are the
927438fd1498Szrj 	     same rtl.  We must copy one of them to prevent invalid rtl
927538fd1498Szrj 	     sharing.  */
927638fd1498Szrj 	  if (cond0 == 0)
927738fd1498Szrj 	    true0 = copy_rtx (true0);
927838fd1498Szrj 	  else if (cond1 == 0)
927938fd1498Szrj 	    true1 = copy_rtx (true1);
928038fd1498Szrj 
928138fd1498Szrj 	  if (COMPARISON_P (x))
928238fd1498Szrj 	    {
928338fd1498Szrj 	      *ptrue = simplify_gen_relational (code, mode, VOIDmode,
928438fd1498Szrj 						true0, true1);
928538fd1498Szrj 	      *pfalse = simplify_gen_relational (code, mode, VOIDmode,
928638fd1498Szrj 						 false0, false1);
928738fd1498Szrj 	     }
928838fd1498Szrj 	  else
928938fd1498Szrj 	    {
929038fd1498Szrj 	      *ptrue = simplify_gen_binary (code, mode, true0, true1);
929138fd1498Szrj 	      *pfalse = simplify_gen_binary (code, mode, false0, false1);
929238fd1498Szrj 	    }
929338fd1498Szrj 
929438fd1498Szrj 	  return cond0 ? cond0 : cond1;
929538fd1498Szrj 	}
929638fd1498Szrj 
929738fd1498Szrj       /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
929838fd1498Szrj 	 operands is zero when the other is nonzero, and vice-versa,
929938fd1498Szrj 	 and STORE_FLAG_VALUE is 1 or -1.  */
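      /* For example, with STORE_FLAG_VALUE == 1,
	 (plus (mult (eq A B) C) (mult (ne A B) D)) has exactly one
	 nonzero MULT, so it is C when (eq A B) holds and D
	 otherwise.  */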
930038fd1498Szrj 
930138fd1498Szrj       if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
930238fd1498Szrj 	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
930338fd1498Szrj 	      || code == UMAX)
930438fd1498Szrj 	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
930538fd1498Szrj 	{
930638fd1498Szrj 	  rtx op0 = XEXP (XEXP (x, 0), 1);
930738fd1498Szrj 	  rtx op1 = XEXP (XEXP (x, 1), 1);
930838fd1498Szrj 
930938fd1498Szrj 	  cond0 = XEXP (XEXP (x, 0), 0);
931038fd1498Szrj 	  cond1 = XEXP (XEXP (x, 1), 0);
931138fd1498Szrj 
931238fd1498Szrj 	  if (COMPARISON_P (cond0)
931338fd1498Szrj 	      && COMPARISON_P (cond1)
9314*58e805e6Szrj 	      && SCALAR_INT_MODE_P (mode)
931538fd1498Szrj 	      && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
931638fd1498Szrj 		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
931738fd1498Szrj 		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
931838fd1498Szrj 		  || ((swap_condition (GET_CODE (cond0))
931938fd1498Szrj 		       == reversed_comparison_code (cond1, NULL))
932038fd1498Szrj 		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
932138fd1498Szrj 		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
932238fd1498Szrj 	      && ! side_effects_p (x))
932338fd1498Szrj 	    {
932438fd1498Szrj 	      *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
932538fd1498Szrj 	      *pfalse = simplify_gen_binary (MULT, mode,
932638fd1498Szrj 					     (code == MINUS
932738fd1498Szrj 					      ? simplify_gen_unary (NEG, mode,
932838fd1498Szrj 								    op1, mode)
932938fd1498Szrj 					      : op1),
933038fd1498Szrj 					      const_true_rtx);
933138fd1498Szrj 	      return cond0;
933238fd1498Szrj 	    }
933338fd1498Szrj 	}
933438fd1498Szrj 
933538fd1498Szrj       /* Similarly for MULT, AND and UMIN, except that for these the result
933638fd1498Szrj 	 is always zero.  */
933738fd1498Szrj       if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
933838fd1498Szrj 	  && (code == MULT || code == AND || code == UMIN)
933938fd1498Szrj 	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
934038fd1498Szrj 	{
934138fd1498Szrj 	  cond0 = XEXP (XEXP (x, 0), 0);
934238fd1498Szrj 	  cond1 = XEXP (XEXP (x, 1), 0);
934338fd1498Szrj 
934438fd1498Szrj 	  if (COMPARISON_P (cond0)
934538fd1498Szrj 	      && COMPARISON_P (cond1)
934638fd1498Szrj 	      && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
934738fd1498Szrj 		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
934838fd1498Szrj 		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
934938fd1498Szrj 		  || ((swap_condition (GET_CODE (cond0))
935038fd1498Szrj 		       == reversed_comparison_code (cond1, NULL))
935138fd1498Szrj 		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
935238fd1498Szrj 		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
935338fd1498Szrj 	      && ! side_effects_p (x))
935438fd1498Szrj 	    {
935538fd1498Szrj 	      *ptrue = *pfalse = const0_rtx;
935638fd1498Szrj 	      return cond0;
935738fd1498Szrj 	    }
935838fd1498Szrj 	}
935938fd1498Szrj     }
936038fd1498Szrj 
936138fd1498Szrj   else if (code == IF_THEN_ELSE)
936238fd1498Szrj     {
936338fd1498Szrj       /* If we have IF_THEN_ELSE already, extract the condition and
936438fd1498Szrj 	 canonicalize it if it is NE or EQ.  */
936538fd1498Szrj       cond0 = XEXP (x, 0);
936638fd1498Szrj       *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
936738fd1498Szrj       if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
936838fd1498Szrj 	return XEXP (cond0, 0);
936938fd1498Szrj       else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
937038fd1498Szrj 	{
937138fd1498Szrj 	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
937238fd1498Szrj 	  return XEXP (cond0, 0);
937338fd1498Szrj 	}
937438fd1498Szrj       else
937538fd1498Szrj 	return cond0;
937638fd1498Szrj     }
937738fd1498Szrj 
937838fd1498Szrj   /* If X is a SUBREG, we can narrow both the true and false values
937938fd1498Szrj      of the inner expression, if there is a condition.  */
938038fd1498Szrj   else if (code == SUBREG
938138fd1498Szrj 	   && (cond0 = if_then_else_cond (SUBREG_REG (x), &true0,
938238fd1498Szrj 					  &false0)) != 0)
938338fd1498Szrj     {
938438fd1498Szrj       true0 = simplify_gen_subreg (mode, true0,
938538fd1498Szrj 				   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
938638fd1498Szrj       false0 = simplify_gen_subreg (mode, false0,
938738fd1498Szrj 				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
938838fd1498Szrj       if (true0 && false0)
938938fd1498Szrj 	{
939038fd1498Szrj 	  *ptrue = true0;
939138fd1498Szrj 	  *pfalse = false0;
939238fd1498Szrj 	  return cond0;
939338fd1498Szrj 	}
939438fd1498Szrj     }
939538fd1498Szrj 
939638fd1498Szrj   /* If X is a constant, this isn't special and will cause confusions
939738fd1498Szrj      if we treat it as such.  Likewise if it is equivalent to a constant.  */
939838fd1498Szrj   else if (CONSTANT_P (x)
939938fd1498Szrj 	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
940038fd1498Szrj     ;
940138fd1498Szrj 
940238fd1498Szrj   /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
940338fd1498Szrj      will be least confusing to the rest of the compiler.  */
940438fd1498Szrj   else if (mode == BImode)
940538fd1498Szrj     {
940638fd1498Szrj       *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
940738fd1498Szrj       return x;
940838fd1498Szrj     }
940938fd1498Szrj 
941038fd1498Szrj   /* If X is known to be either 0 or -1, those are the true and
941138fd1498Szrj      false values when testing X.  */
941238fd1498Szrj   else if (x == constm1_rtx || x == const0_rtx
941338fd1498Szrj 	   || (is_a <scalar_int_mode> (mode, &int_mode)
941438fd1498Szrj 	       && (num_sign_bit_copies (x, int_mode)
941538fd1498Szrj 		   == GET_MODE_PRECISION (int_mode))))
941638fd1498Szrj     {
941738fd1498Szrj       *ptrue = constm1_rtx, *pfalse = const0_rtx;
941838fd1498Szrj       return x;
941938fd1498Szrj     }
942038fd1498Szrj 
942138fd1498Szrj   /* Likewise for 0 or a single bit.  */
942238fd1498Szrj   else if (HWI_COMPUTABLE_MODE_P (mode)
942338fd1498Szrj 	   && pow2p_hwi (nz = nonzero_bits (x, mode)))
942438fd1498Szrj     {
942538fd1498Szrj       *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
942638fd1498Szrj       return x;
942738fd1498Szrj     }
942838fd1498Szrj 
942938fd1498Szrj   /* Otherwise fail; show no condition with true and false values the same.  */
943038fd1498Szrj   *ptrue = *pfalse = x;
943138fd1498Szrj   return 0;
943238fd1498Szrj }
943338fd1498Szrj 
943438fd1498Szrj /* Return the value of expression X given the fact that condition COND
943538fd1498Szrj    is known to be true when applied to REG as its first operand and VAL
943638fd1498Szrj    as its second.  X is known to not be shared and so can be modified in
943738fd1498Szrj    place.
943838fd1498Szrj 
943938fd1498Szrj    We only handle the simplest cases, and specifically those cases that
944038fd1498Szrj    arise with IF_THEN_ELSE expressions.  */
944138fd1498Szrj 
944238fd1498Szrj static rtx
944338fd1498Szrj known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
944438fd1498Szrj {
944538fd1498Szrj   enum rtx_code code = GET_CODE (x);
944638fd1498Szrj   const char *fmt;
944738fd1498Szrj   int i, j;
944838fd1498Szrj 
944938fd1498Szrj   if (side_effects_p (x))
945038fd1498Szrj     return x;
945138fd1498Szrj 
945238fd1498Szrj   /* If either operand of the condition is a floating point value,
945338fd1498Szrj      then we have to avoid collapsing an EQ comparison.  */
945438fd1498Szrj   if (cond == EQ
945538fd1498Szrj       && rtx_equal_p (x, reg)
945638fd1498Szrj       && ! FLOAT_MODE_P (GET_MODE (x))
945738fd1498Szrj       && ! FLOAT_MODE_P (GET_MODE (val)))
945838fd1498Szrj     return val;
945938fd1498Szrj 
946038fd1498Szrj   if (cond == UNEQ && rtx_equal_p (x, reg))
946138fd1498Szrj     return val;
946238fd1498Szrj 
946338fd1498Szrj   /* If X is (abs REG) and we know something about REG's relationship
946438fd1498Szrj      with zero, we may be able to simplify this.  */
946538fd1498Szrj 
946638fd1498Szrj   if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
946738fd1498Szrj     switch (cond)
946838fd1498Szrj       {
946938fd1498Szrj       case GE:  case GT:  case EQ:
947038fd1498Szrj 	return XEXP (x, 0);
947138fd1498Szrj       case LT:  case LE:
947238fd1498Szrj 	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
947338fd1498Szrj 				   XEXP (x, 0),
947438fd1498Szrj 				   GET_MODE (XEXP (x, 0)));
947538fd1498Szrj       default:
947638fd1498Szrj 	break;
947738fd1498Szrj       }
947838fd1498Szrj 
947938fd1498Szrj   /* The only other cases we handle are MIN, MAX, and comparisons if the
948038fd1498Szrj      operands are the same as REG and VAL.  */
948138fd1498Szrj 
948238fd1498Szrj   else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
948338fd1498Szrj     {
948438fd1498Szrj       if (rtx_equal_p (XEXP (x, 0), val))
948538fd1498Szrj         {
948638fd1498Szrj 	  std::swap (val, reg);
948738fd1498Szrj 	  cond = swap_condition (cond);
948838fd1498Szrj         }
948938fd1498Szrj 
949038fd1498Szrj       if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
949138fd1498Szrj 	{
949238fd1498Szrj 	  if (COMPARISON_P (x))
949338fd1498Szrj 	    {
949438fd1498Szrj 	      if (comparison_dominates_p (cond, code))
9495*58e805e6Szrj 		return VECTOR_MODE_P (GET_MODE (x)) ? x : const_true_rtx;
949638fd1498Szrj 
949738fd1498Szrj 	      code = reversed_comparison_code (x, NULL);
949838fd1498Szrj 	      if (code != UNKNOWN
949938fd1498Szrj 		  && comparison_dominates_p (cond, code))
9500*58e805e6Szrj 		return CONST0_RTX (GET_MODE (x));
950138fd1498Szrj 	      else
950238fd1498Szrj 		return x;
950338fd1498Szrj 	    }
950438fd1498Szrj 	  else if (code == SMAX || code == SMIN
950538fd1498Szrj 		   || code == UMIN || code == UMAX)
950638fd1498Szrj 	    {
950738fd1498Szrj 	      int unsignedp = (code == UMIN || code == UMAX);
950838fd1498Szrj 
950938fd1498Szrj 	      /* Do not reverse the condition when it is NE or EQ.
951038fd1498Szrj 		 This is because we cannot conclude anything about
951138fd1498Szrj 		 the value of 'SMAX (x, y)' when x is not equal to y,
951238fd1498Szrj 		 but we can when x equals y.  */
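	      /* For example, if REG >= VAL is known, (smin REG VAL)
		 reduces to VAL and (smax REG VAL) to REG; likewise for
		 UMIN and UMAX when REG >= VAL is known unsigned.  */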
951338fd1498Szrj 	      if ((code == SMAX || code == UMAX)
951438fd1498Szrj 		  && ! (cond == EQ || cond == NE))
951538fd1498Szrj 		cond = reverse_condition (cond);
951638fd1498Szrj 
951738fd1498Szrj 	      switch (cond)
951838fd1498Szrj 		{
951938fd1498Szrj 		case GE:   case GT:
952038fd1498Szrj 		  return unsignedp ? x : XEXP (x, 1);
952138fd1498Szrj 		case LE:   case LT:
952238fd1498Szrj 		  return unsignedp ? x : XEXP (x, 0);
952338fd1498Szrj 		case GEU:  case GTU:
952438fd1498Szrj 		  return unsignedp ? XEXP (x, 1) : x;
952538fd1498Szrj 		case LEU:  case LTU:
952638fd1498Szrj 		  return unsignedp ? XEXP (x, 0) : x;
952738fd1498Szrj 		default:
952838fd1498Szrj 		  break;
952938fd1498Szrj 		}
953038fd1498Szrj 	    }
953138fd1498Szrj 	}
953238fd1498Szrj     }
953338fd1498Szrj   else if (code == SUBREG)
953438fd1498Szrj     {
953538fd1498Szrj       machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
953638fd1498Szrj       rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
953738fd1498Szrj 
953838fd1498Szrj       if (SUBREG_REG (x) != r)
953938fd1498Szrj 	{
954038fd1498Szrj 	  /* We must simplify subreg here, before we lose track of the
954138fd1498Szrj 	     original inner_mode.  */
954238fd1498Szrj 	  new_rtx = simplify_subreg (GET_MODE (x), r,
954338fd1498Szrj 				     inner_mode, SUBREG_BYTE (x));
954438fd1498Szrj 	  if (new_rtx)
954538fd1498Szrj 	    return new_rtx;
954638fd1498Szrj 	  else
954738fd1498Szrj 	    SUBST (SUBREG_REG (x), r);
954838fd1498Szrj 	}
954938fd1498Szrj 
955038fd1498Szrj       return x;
955138fd1498Szrj     }
955238fd1498Szrj   /* We don't have to handle SIGN_EXTEND here, because even in the
955338fd1498Szrj      case of replacing something with a modeless CONST_INT, a
955438fd1498Szrj      CONST_INT is already (supposed to be) a valid sign extension for
955538fd1498Szrj      its narrower mode, which implies it's already properly
955638fd1498Szrj      sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
955738fd1498Szrj      story is different.  */
955838fd1498Szrj   else if (code == ZERO_EXTEND)
955938fd1498Szrj     {
956038fd1498Szrj       machine_mode inner_mode = GET_MODE (XEXP (x, 0));
956138fd1498Szrj       rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
956238fd1498Szrj 
956338fd1498Szrj       if (XEXP (x, 0) != r)
956438fd1498Szrj 	{
956538fd1498Szrj 	  /* We must simplify the zero_extend here, before we lose
956638fd1498Szrj 	     track of the original inner_mode.  */
956738fd1498Szrj 	  new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
956838fd1498Szrj 					      r, inner_mode);
956938fd1498Szrj 	  if (new_rtx)
957038fd1498Szrj 	    return new_rtx;
957138fd1498Szrj 	  else
957238fd1498Szrj 	    SUBST (XEXP (x, 0), r);
957338fd1498Szrj 	}
957438fd1498Szrj 
957538fd1498Szrj       return x;
957638fd1498Szrj     }
957738fd1498Szrj 
957838fd1498Szrj   fmt = GET_RTX_FORMAT (code);
957938fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
958038fd1498Szrj     {
958138fd1498Szrj       if (fmt[i] == 'e')
958238fd1498Szrj 	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
958338fd1498Szrj       else if (fmt[i] == 'E')
958438fd1498Szrj 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
958538fd1498Szrj 	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
958638fd1498Szrj 						cond, reg, val));
958738fd1498Szrj     }
958838fd1498Szrj 
958938fd1498Szrj   return x;
959038fd1498Szrj }
959138fd1498Szrj 
959238fd1498Szrj /* See if X and Y are equal for the purposes of seeing if we can rewrite an
959338fd1498Szrj    assignment as a field assignment.  */
959438fd1498Szrj 
959538fd1498Szrj static int
959638fd1498Szrj rtx_equal_for_field_assignment_p (rtx x, rtx y, bool widen_x)
959738fd1498Szrj {
959838fd1498Szrj   if (widen_x && GET_MODE (x) != GET_MODE (y))
959938fd1498Szrj     {
960038fd1498Szrj       if (paradoxical_subreg_p (GET_MODE (x), GET_MODE (y)))
960138fd1498Szrj 	return 0;
960238fd1498Szrj       if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
960338fd1498Szrj 	return 0;
960438fd1498Szrj       x = adjust_address_nv (x, GET_MODE (y),
960538fd1498Szrj 			     byte_lowpart_offset (GET_MODE (y),
960638fd1498Szrj 						  GET_MODE (x)));
960738fd1498Szrj     }
960838fd1498Szrj 
960938fd1498Szrj   if (x == y || rtx_equal_p (x, y))
961038fd1498Szrj     return 1;
961138fd1498Szrj 
961238fd1498Szrj   if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
961338fd1498Szrj     return 0;
961438fd1498Szrj 
961538fd1498Szrj   /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
961638fd1498Szrj      Note that all SUBREGs of MEM are paradoxical; otherwise they
961738fd1498Szrj      would have been rewritten.  */
961838fd1498Szrj   if (MEM_P (x) && GET_CODE (y) == SUBREG
961938fd1498Szrj       && MEM_P (SUBREG_REG (y))
962038fd1498Szrj       && rtx_equal_p (SUBREG_REG (y),
962138fd1498Szrj 		      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
962238fd1498Szrj     return 1;
962338fd1498Szrj 
962438fd1498Szrj   if (MEM_P (y) && GET_CODE (x) == SUBREG
962538fd1498Szrj       && MEM_P (SUBREG_REG (x))
962638fd1498Szrj       && rtx_equal_p (SUBREG_REG (x),
962738fd1498Szrj 		      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
962838fd1498Szrj     return 1;
962938fd1498Szrj 
963038fd1498Szrj   /* We used to see if get_last_value of X and Y were the same but that's
963138fd1498Szrj      not correct.  In one direction, we'll cause the assignment to have
963238fd1498Szrj      the wrong destination and in the case, we'll import a register into this
963338fd1498Szrj      the wrong destination and in the other case, we'll import a register into
963438fd1498Szrj      this insn that might already have been dead.   So fail if none of the
963538fd1498Szrj   return 0;
963638fd1498Szrj }
963738fd1498Szrj 
963838fd1498Szrj /* See if X, a SET operation, can be rewritten as a bit-field assignment.
963938fd1498Szrj    Return that assignment if so.
964038fd1498Szrj 
964138fd1498Szrj    We only handle the most common cases.  */
964238fd1498Szrj 
964338fd1498Szrj static rtx
964438fd1498Szrj make_field_assignment (rtx x)
964538fd1498Szrj {
964638fd1498Szrj   rtx dest = SET_DEST (x);
964738fd1498Szrj   rtx src = SET_SRC (x);
964838fd1498Szrj   rtx assign;
964938fd1498Szrj   rtx rhs, lhs;
965038fd1498Szrj   HOST_WIDE_INT c1;
965138fd1498Szrj   HOST_WIDE_INT pos;
965238fd1498Szrj   unsigned HOST_WIDE_INT len;
965338fd1498Szrj   rtx other;
965438fd1498Szrj 
965538fd1498Szrj   /* All the rules in this function are specific to scalar integers.  */
965638fd1498Szrj   scalar_int_mode mode;
965738fd1498Szrj   if (!is_a <scalar_int_mode> (GET_MODE (dest), &mode))
965838fd1498Szrj     return x;
965938fd1498Szrj 
966038fd1498Szrj   /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
966138fd1498Szrj      a clear of a one-bit field.  We will have changed it to
966238fd1498Szrj      (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
966338fd1498Szrj      for a SUBREG.  */
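  /* For example, DEST &= ~(1 << POS) reaches this point as
     (set DEST (and (rotate (const_int -2) POS) DEST)) and is
     rewritten as (set (zero_extract DEST (const_int 1) POS)
     (const_int 0)).  */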
966438fd1498Szrj 
966538fd1498Szrj   if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
966638fd1498Szrj       && CONST_INT_P (XEXP (XEXP (src, 0), 0))
966738fd1498Szrj       && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
966838fd1498Szrj       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
966938fd1498Szrj     {
967038fd1498Szrj       assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
967138fd1498Szrj 				1, 1, 1, 0);
967238fd1498Szrj       if (assign != 0)
967338fd1498Szrj 	return gen_rtx_SET (assign, const0_rtx);
967438fd1498Szrj       return x;
967538fd1498Szrj     }
967638fd1498Szrj 
967738fd1498Szrj   if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
967838fd1498Szrj       && subreg_lowpart_p (XEXP (src, 0))
967938fd1498Szrj       && partial_subreg_p (XEXP (src, 0))
968038fd1498Szrj       && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
968138fd1498Szrj       && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
968238fd1498Szrj       && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
968338fd1498Szrj       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
968438fd1498Szrj     {
968538fd1498Szrj       assign = make_extraction (VOIDmode, dest, 0,
968638fd1498Szrj 				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
968738fd1498Szrj 				1, 1, 1, 0);
968838fd1498Szrj       if (assign != 0)
968938fd1498Szrj 	return gen_rtx_SET (assign, const0_rtx);
969038fd1498Szrj       return x;
969138fd1498Szrj     }
969238fd1498Szrj 
969338fd1498Szrj   /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
969438fd1498Szrj      one-bit field.  */
969538fd1498Szrj   if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
969638fd1498Szrj       && XEXP (XEXP (src, 0), 0) == const1_rtx
969738fd1498Szrj       && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
969838fd1498Szrj     {
969938fd1498Szrj       assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
970038fd1498Szrj 				1, 1, 1, 0);
970138fd1498Szrj       if (assign != 0)
970238fd1498Szrj 	return gen_rtx_SET (assign, const1_rtx);
970338fd1498Szrj       return x;
970438fd1498Szrj     }
970538fd1498Szrj 
970638fd1498Szrj   /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
970738fd1498Szrj      SRC is an AND with all bits of that field set, then we can discard
970838fd1498Szrj      the AND.  */
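  /* For example, (set (zero_extract DEST (const_int 8) POS)
     (and FOO (const_int 255))) stores only the low 8 bits of FOO
     anyway, so the AND is redundant.  */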
970938fd1498Szrj   if (GET_CODE (dest) == ZERO_EXTRACT
971038fd1498Szrj       && CONST_INT_P (XEXP (dest, 1))
971138fd1498Szrj       && GET_CODE (src) == AND
971238fd1498Szrj       && CONST_INT_P (XEXP (src, 1)))
971338fd1498Szrj     {
971438fd1498Szrj       HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
971538fd1498Szrj       unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
971638fd1498Szrj       unsigned HOST_WIDE_INT ze_mask;
971738fd1498Szrj 
971838fd1498Szrj       if (width >= HOST_BITS_PER_WIDE_INT)
971938fd1498Szrj 	ze_mask = -1;
972038fd1498Szrj       else
972138fd1498Szrj 	ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
972238fd1498Szrj 
972338fd1498Szrj       /* Complete overlap.  We can remove the source AND.  */
972438fd1498Szrj       if ((and_mask & ze_mask) == ze_mask)
972538fd1498Szrj 	return gen_rtx_SET (dest, XEXP (src, 0));
972638fd1498Szrj 
972738fd1498Szrj       /* Partial overlap.  We can reduce the source AND.  */
972838fd1498Szrj       if ((and_mask & ze_mask) != and_mask)
972938fd1498Szrj 	{
973038fd1498Szrj 	  src = gen_rtx_AND (mode, XEXP (src, 0),
973138fd1498Szrj 			     gen_int_mode (and_mask & ze_mask, mode));
973238fd1498Szrj 	  return gen_rtx_SET (dest, src);
973338fd1498Szrj 	}
973438fd1498Szrj     }
973538fd1498Szrj 
973638fd1498Szrj   /* The other case we handle is assignments into a constant-position
973738fd1498Szrj      field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
973838fd1498Szrj      a mask that has all one bits except for a group of zero bits and
973938fd1498Szrj      OTHER is known to have zeros where C1 has ones, this is such an
974038fd1498Szrj      assignment.  Compute the position and length from C1.  Shift OTHER
974138fd1498Szrj      to the appropriate position, force it to the required mode, and
974238fd1498Szrj      make the extraction.  Check for the AND in both operands.  */
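  /* For example, in SImode, (set DEST (ior (and DEST 0xffff00ff) OTHER))
     with OTHER known to be zero outside bits 8..15 gives POS == 8 and
     LEN == 8, and becomes
     (set (zero_extract DEST (const_int 8) (const_int 8))
	  (lshiftrt OTHER (const_int 8))).  */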
974338fd1498Szrj 
974438fd1498Szrj   /* One or more SUBREGs might obscure the constant-position field
974538fd1498Szrj      assignment.  The first one we are likely to encounter is an outer
974638fd1498Szrj      narrowing SUBREG, which we can just strip for the purposes of
974738fd1498Szrj      identifying the constant-field assignment.  */
974838fd1498Szrj   scalar_int_mode src_mode = mode;
974938fd1498Szrj   if (GET_CODE (src) == SUBREG
975038fd1498Szrj       && subreg_lowpart_p (src)
975138fd1498Szrj       && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (src)), &src_mode))
975238fd1498Szrj     src = SUBREG_REG (src);
975338fd1498Szrj 
975438fd1498Szrj   if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
975538fd1498Szrj     return x;
975638fd1498Szrj 
975738fd1498Szrj   rhs = expand_compound_operation (XEXP (src, 0));
975838fd1498Szrj   lhs = expand_compound_operation (XEXP (src, 1));
975938fd1498Szrj 
976038fd1498Szrj   if (GET_CODE (rhs) == AND
976138fd1498Szrj       && CONST_INT_P (XEXP (rhs, 1))
976238fd1498Szrj       && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
976338fd1498Szrj     c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
976438fd1498Szrj   /* The second SUBREG that might get in the way is a paradoxical
976538fd1498Szrj      SUBREG around the first operand of the AND.  We want to
976638fd1498Szrj      pretend the operand is as wide as the destination here.   We
976738fd1498Szrj      do this by adjusting the MEM to wider mode for the sole
976838fd1498Szrj      purpose of the call to rtx_equal_for_field_assignment_p.   Also
976938fd1498Szrj      note this trick only works for MEMs.  */
977038fd1498Szrj   else if (GET_CODE (rhs) == AND
977138fd1498Szrj 	   && paradoxical_subreg_p (XEXP (rhs, 0))
977238fd1498Szrj 	   && MEM_P (SUBREG_REG (XEXP (rhs, 0)))
977338fd1498Szrj 	   && CONST_INT_P (XEXP (rhs, 1))
977438fd1498Szrj 	   && rtx_equal_for_field_assignment_p (SUBREG_REG (XEXP (rhs, 0)),
977538fd1498Szrj 						dest, true))
977638fd1498Szrj     c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
977738fd1498Szrj   else if (GET_CODE (lhs) == AND
977838fd1498Szrj 	   && CONST_INT_P (XEXP (lhs, 1))
977938fd1498Szrj 	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
978038fd1498Szrj     c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
978138fd1498Szrj   /* The second SUBREG that might get in the way is a paradoxical
978238fd1498Szrj      SUBREG around the first operand of the AND.  We want to
978338fd1498Szrj      pretend the operand is as wide as the destination here.   We
978438fd1498Szrj      do this by adjusting the MEM to wider mode for the sole
978538fd1498Szrj      purpose of the call to rtx_equal_for_field_assignment_p.   Also
978638fd1498Szrj      note this trick only works for MEMs.  */
978738fd1498Szrj   else if (GET_CODE (lhs) == AND
978838fd1498Szrj 	   && paradoxical_subreg_p (XEXP (lhs, 0))
978938fd1498Szrj 	   && MEM_P (SUBREG_REG (XEXP (lhs, 0)))
979038fd1498Szrj 	   && CONST_INT_P (XEXP (lhs, 1))
979138fd1498Szrj 	   && rtx_equal_for_field_assignment_p (SUBREG_REG (XEXP (lhs, 0)),
979238fd1498Szrj 						dest, true))
979338fd1498Szrj     c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
979438fd1498Szrj   else
979538fd1498Szrj     return x;
979638fd1498Szrj 
979738fd1498Szrj   pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (mode), &len);
979838fd1498Szrj   if (pos < 0
979938fd1498Szrj       || pos + len > GET_MODE_PRECISION (mode)
980038fd1498Szrj       || GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT
980138fd1498Szrj       || (c1 & nonzero_bits (other, mode)) != 0)
980238fd1498Szrj     return x;
980338fd1498Szrj 
980438fd1498Szrj   assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
980538fd1498Szrj   if (assign == 0)
980638fd1498Szrj     return x;
980738fd1498Szrj 
980838fd1498Szrj   /* The mode to use for the source is the mode of the assignment, or of
980938fd1498Szrj      what is inside a possible STRICT_LOW_PART.  */
981038fd1498Szrj   machine_mode new_mode = (GET_CODE (assign) == STRICT_LOW_PART
981138fd1498Szrj 			   ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
981238fd1498Szrj 
981338fd1498Szrj   /* Shift OTHER right POS places and make it the source, restricting it
981438fd1498Szrj      to the proper length and mode.  */
981538fd1498Szrj 
981638fd1498Szrj   src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
981738fd1498Szrj 						     src_mode, other, pos),
981838fd1498Szrj 			       dest);
981938fd1498Szrj   src = force_to_mode (src, new_mode,
982038fd1498Szrj 		       len >= HOST_BITS_PER_WIDE_INT
982138fd1498Szrj 		       ? HOST_WIDE_INT_M1U
982238fd1498Szrj 		       : (HOST_WIDE_INT_1U << len) - 1,
982338fd1498Szrj 		       0);
982438fd1498Szrj 
982538fd1498Szrj   /* If SRC is masked by an AND that does not make a difference in
982638fd1498Szrj      the value being stored, strip it.  */
982738fd1498Szrj   if (GET_CODE (assign) == ZERO_EXTRACT
982838fd1498Szrj       && CONST_INT_P (XEXP (assign, 1))
982938fd1498Szrj       && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
983038fd1498Szrj       && GET_CODE (src) == AND
983138fd1498Szrj       && CONST_INT_P (XEXP (src, 1))
983238fd1498Szrj       && UINTVAL (XEXP (src, 1))
983338fd1498Szrj 	 == (HOST_WIDE_INT_1U << INTVAL (XEXP (assign, 1))) - 1)
983438fd1498Szrj     src = XEXP (src, 0);
983538fd1498Szrj 
983638fd1498Szrj   return gen_rtx_SET (assign, src);
983738fd1498Szrj }
983838fd1498Szrj 
983938fd1498Szrj /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
984038fd1498Szrj    if so.  */
984138fd1498Szrj 
984238fd1498Szrj static rtx
984338fd1498Szrj apply_distributive_law (rtx x)
984438fd1498Szrj {
984538fd1498Szrj   enum rtx_code code = GET_CODE (x);
984638fd1498Szrj   enum rtx_code inner_code;
984738fd1498Szrj   rtx lhs, rhs, other;
984838fd1498Szrj   rtx tem;
984938fd1498Szrj 
985038fd1498Szrj   /* Distributivity is not true for floating point as it can change the
985138fd1498Szrj      value.  So we don't do it unless -funsafe-math-optimizations.  */
985238fd1498Szrj   if (FLOAT_MODE_P (GET_MODE (x))
985338fd1498Szrj       && ! flag_unsafe_math_optimizations)
985438fd1498Szrj     return x;
985538fd1498Szrj 
985638fd1498Szrj   /* The outer operation can only be one of the following:  */
985738fd1498Szrj   if (code != IOR && code != AND && code != XOR
985838fd1498Szrj       && code != PLUS && code != MINUS)
985938fd1498Szrj     return x;
986038fd1498Szrj 
986138fd1498Szrj   lhs = XEXP (x, 0);
986238fd1498Szrj   rhs = XEXP (x, 1);
986338fd1498Szrj 
986438fd1498Szrj   /* If either operand is a primitive we can't do anything, so get out
986538fd1498Szrj      fast.  */
986638fd1498Szrj   if (OBJECT_P (lhs) || OBJECT_P (rhs))
986738fd1498Szrj     return x;
986838fd1498Szrj 
986938fd1498Szrj   lhs = expand_compound_operation (lhs);
987038fd1498Szrj   rhs = expand_compound_operation (rhs);
987138fd1498Szrj   inner_code = GET_CODE (lhs);
987238fd1498Szrj   if (inner_code != GET_CODE (rhs))
987338fd1498Szrj     return x;
987438fd1498Szrj 
987538fd1498Szrj   /* See if the inner and outer operations distribute.  */
987638fd1498Szrj   switch (inner_code)
987738fd1498Szrj     {
987838fd1498Szrj     case LSHIFTRT:
987938fd1498Szrj     case ASHIFTRT:
988038fd1498Szrj     case AND:
988138fd1498Szrj     case IOR:
988238fd1498Szrj       /* These all distribute except over PLUS and MINUS.  */
988338fd1498Szrj       if (code == PLUS || code == MINUS)
988438fd1498Szrj 	return x;
988538fd1498Szrj       break;
988638fd1498Szrj 
988738fd1498Szrj     case MULT:
988838fd1498Szrj       if (code != PLUS && code != MINUS)
988938fd1498Szrj 	return x;
989038fd1498Szrj       break;
989138fd1498Szrj 
989238fd1498Szrj     case ASHIFT:
989338fd1498Szrj       /* This is also a multiply, so it distributes over everything.  */
989438fd1498Szrj       break;
989538fd1498Szrj 
989638fd1498Szrj     /* This used to handle SUBREG, but this turned out to be counter-
989738fd1498Szrj        productive, since (subreg (op ...)) usually is not handled by
989838fd1498Szrj        insn patterns, and this "optimization" therefore transformed
989938fd1498Szrj        recognizable patterns into unrecognizable ones.  Therefore the
990038fd1498Szrj        SUBREG case was removed from here.
990138fd1498Szrj 
990238fd1498Szrj        It is possible that distributing SUBREG over arithmetic operations
990338fd1498Szrj        leads to an intermediate result that can then be optimized further,
990438fd1498Szrj        e.g. by moving the outer SUBREG to the other side of a SET as done
990538fd1498Szrj        in simplify_set.  This seems to have been the original intent of
990638fd1498Szrj        handling SUBREGs here.
990738fd1498Szrj 
990838fd1498Szrj        However, with current GCC this does not appear to actually happen,
990938fd1498Szrj        at least on major platforms.  If some case is found where removing
991038fd1498Szrj        the SUBREG case here prevents follow-on optimizations, distributing
991138fd1498Szrj        SUBREGs ought to be re-added at that place, e.g. in simplify_set.  */
991238fd1498Szrj 
991338fd1498Szrj     default:
991438fd1498Szrj       return x;
991538fd1498Szrj     }
991638fd1498Szrj 
991738fd1498Szrj   /* Set LHS and RHS to the inner operands (A and B in the example
991838fd1498Szrj      above) and set OTHER to the common operand (C in the example).
991938fd1498Szrj      There is only one way to do this unless the inner operation is
992038fd1498Szrj      commutative.  */
992138fd1498Szrj   if (COMMUTATIVE_ARITH_P (lhs)
992238fd1498Szrj       && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
992338fd1498Szrj     other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
992438fd1498Szrj   else if (COMMUTATIVE_ARITH_P (lhs)
992538fd1498Szrj 	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
992638fd1498Szrj     other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
992738fd1498Szrj   else if (COMMUTATIVE_ARITH_P (lhs)
992838fd1498Szrj 	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
992938fd1498Szrj     other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
993038fd1498Szrj   else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
993138fd1498Szrj     other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
993238fd1498Szrj   else
993338fd1498Szrj     return x;
993438fd1498Szrj 
993538fd1498Szrj   /* Form the new inner operation, seeing if it simplifies first.  */
993638fd1498Szrj   tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
993738fd1498Szrj 
993838fd1498Szrj   /* There is one exception to the general way of distributing:
993938fd1498Szrj      (a | c) ^ (b | c) -> (a ^ b) & ~c  */
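  /* That identity holds bitwise: where C is 1 both IORs are 1 and the
     XOR is 0, matching the AND with ~C; where C is 0 it is A ^ B.  */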
994038fd1498Szrj   if (code == XOR && inner_code == IOR)
994138fd1498Szrj     {
994238fd1498Szrj       inner_code = AND;
994338fd1498Szrj       other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
994438fd1498Szrj     }
994538fd1498Szrj 
994638fd1498Szrj   /* We may be able to continue distributing the result, so call
994738fd1498Szrj      ourselves recursively on the inner operation before forming the
994838fd1498Szrj      outer operation, which we return.  */
994938fd1498Szrj   return simplify_gen_binary (inner_code, GET_MODE (x),
995038fd1498Szrj 			      apply_distributive_law (tem), other);
995138fd1498Szrj }
995238fd1498Szrj 
995338fd1498Szrj /* See if X is of the form (* (+ A B) C), and if so convert to
995438fd1498Szrj    (+ (* A C) (* B C)) and try to simplify.
995538fd1498Szrj 
995638fd1498Szrj    Most of the time, this results in no change.  However, if some of
995738fd1498Szrj    the operands are the same or inverses of each other, simplifications
995838fd1498Szrj    will result.
995938fd1498Szrj 
996038fd1498Szrj    For example, (and (ior A B) (not B)) can occur as the result of
996138fd1498Szrj    expanding a bit field assignment.  When we apply the distributive
996238fd1498Szrj    law to this, we get (ior (and A (not B)) (and B (not B))),
996338fd1498Szrj    which then simplifies to (and A (not B)).
996438fd1498Szrj 
996538fd1498Szrj    Note that no checks happen on the validity of applying the inverse
996638fd1498Szrj    distributive law.  Checking here would be pointless, since it can be
996738fd1498Szrj    done in the few places where this routine is called.
996838fd1498Szrj 
996938fd1498Szrj    N is the index of the term that is decomposed (the arithmetic operation,
997038fd1498Szrj    i.e. (+ A B) in the first example above).  !N is the index of the term that
997138fd1498Szrj    is distributed, i.e. of C in the first example above.  */
997238fd1498Szrj static rtx
997338fd1498Szrj distribute_and_simplify_rtx (rtx x, int n)
997438fd1498Szrj {
997538fd1498Szrj   machine_mode mode;
997638fd1498Szrj   enum rtx_code outer_code, inner_code;
997738fd1498Szrj   rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
997838fd1498Szrj 
997938fd1498Szrj   /* Distributivity is not true for floating point as it can change the
998038fd1498Szrj      value.  So we don't do it unless -funsafe-math-optimizations.  */
998138fd1498Szrj   if (FLOAT_MODE_P (GET_MODE (x))
998238fd1498Szrj       && ! flag_unsafe_math_optimizations)
998338fd1498Szrj     return NULL_RTX;
998438fd1498Szrj 
998538fd1498Szrj   decomposed = XEXP (x, n);
998638fd1498Szrj   if (!ARITHMETIC_P (decomposed))
998738fd1498Szrj     return NULL_RTX;
998838fd1498Szrj 
998938fd1498Szrj   mode = GET_MODE (x);
999038fd1498Szrj   outer_code = GET_CODE (x);
999138fd1498Szrj   distributed = XEXP (x, !n);
999238fd1498Szrj 
999338fd1498Szrj   inner_code = GET_CODE (decomposed);
999438fd1498Szrj   inner_op0 = XEXP (decomposed, 0);
999538fd1498Szrj   inner_op1 = XEXP (decomposed, 1);
999638fd1498Szrj 
999738fd1498Szrj   /* Special case (and (xor B C) (not A)), which is equivalent to
999838fd1498Szrj      (xor (ior A B) (ior A C))  */
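  /* Bitwise: where A is 1 both IORs are 1 and their XOR is 0, matching
     the AND with (not A); where A is 0 the value is B ^ C.  */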
999938fd1498Szrj   if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
1000038fd1498Szrj     {
1000138fd1498Szrj       distributed = XEXP (distributed, 0);
1000238fd1498Szrj       outer_code = IOR;
1000338fd1498Szrj     }
1000438fd1498Szrj 
1000538fd1498Szrj   if (n == 0)
1000638fd1498Szrj     {
1000738fd1498Szrj       /* Distribute the second term.  */
1000838fd1498Szrj       new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
1000938fd1498Szrj       new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
1001038fd1498Szrj     }
1001138fd1498Szrj   else
1001238fd1498Szrj     {
1001338fd1498Szrj       /* Distribute the first term.  */
1001438fd1498Szrj       new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
1001538fd1498Szrj       new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
1001638fd1498Szrj     }
1001738fd1498Szrj 
1001838fd1498Szrj   tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
1001938fd1498Szrj 						     new_op0, new_op1));
1002038fd1498Szrj   if (GET_CODE (tmp) != outer_code
1002138fd1498Szrj       && (set_src_cost (tmp, mode, optimize_this_for_speed_p)
1002238fd1498Szrj 	  < set_src_cost (x, mode, optimize_this_for_speed_p)))
1002338fd1498Szrj     return tmp;
1002438fd1498Szrj 
1002538fd1498Szrj   return NULL_RTX;
1002638fd1498Szrj }
1002738fd1498Szrj 
1002838fd1498Szrj /* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
1002938fd1498Szrj    in MODE.  Return an equivalent form, if different from (and VAROP
1003038fd1498Szrj    (const_int CONSTOP)).  Otherwise, return NULL_RTX.  */
1003138fd1498Szrj 
1003238fd1498Szrj static rtx
1003338fd1498Szrj simplify_and_const_int_1 (scalar_int_mode mode, rtx varop,
1003438fd1498Szrj 			  unsigned HOST_WIDE_INT constop)
1003538fd1498Szrj {
1003638fd1498Szrj   unsigned HOST_WIDE_INT nonzero;
1003738fd1498Szrj   unsigned HOST_WIDE_INT orig_constop;
1003838fd1498Szrj   rtx orig_varop;
1003938fd1498Szrj   int i;
1004038fd1498Szrj 
1004138fd1498Szrj   orig_varop = varop;
1004238fd1498Szrj   orig_constop = constop;
1004338fd1498Szrj   if (GET_CODE (varop) == CLOBBER)
1004438fd1498Szrj     return NULL_RTX;
1004538fd1498Szrj 
1004638fd1498Szrj   /* Simplify VAROP knowing that we will be only looking at some of the
1004738fd1498Szrj      bits in it.
1004838fd1498Szrj 
1004938fd1498Szrj      Note by passing in CONSTOP, we guarantee that the bits not set in
1005038fd1498Szrj      CONSTOP are not significant and will never be examined.  We must
1005138fd1498Szrj      ensure that is the case by explicitly masking out those bits
1005238fd1498Szrj      before returning.  */
1005338fd1498Szrj   varop = force_to_mode (varop, mode, constop, 0);
1005438fd1498Szrj 
1005538fd1498Szrj   /* If VAROP is a CLOBBER, we will fail so return it.  */
1005638fd1498Szrj   if (GET_CODE (varop) == CLOBBER)
1005738fd1498Szrj     return varop;
1005838fd1498Szrj 
1005938fd1498Szrj   /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
1006038fd1498Szrj      to VAROP and return the new constant.  */
1006138fd1498Szrj   if (CONST_INT_P (varop))
1006238fd1498Szrj     return gen_int_mode (INTVAL (varop) & constop, mode);
1006338fd1498Szrj 
1006438fd1498Szrj   /* See what bits may be nonzero in VAROP.  Unlike the general case of
1006538fd1498Szrj      a call to nonzero_bits, here we don't care about bits outside
1006638fd1498Szrj      MODE.  */
1006738fd1498Szrj 
1006838fd1498Szrj   nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
1006938fd1498Szrj 
1007038fd1498Szrj   /* Turn off all bits in the constant that are known to already be zero.
1007138fd1498Szrj      Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
1007238fd1498Szrj      which is tested below.  */
1007338fd1498Szrj 
1007438fd1498Szrj   constop &= nonzero;
1007538fd1498Szrj 
1007638fd1498Szrj   /* If we don't have any bits left, return zero.  */
1007738fd1498Szrj   if (constop == 0)
1007838fd1498Szrj     return const0_rtx;
1007938fd1498Szrj 
1008038fd1498Szrj   /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
1008138fd1498Szrj      a power of two, we can replace this with an ASHIFT.  */
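  /* For example, if X is known to be 0 or 1, (and (neg X) 4) is
     0 or 4 respectively, i.e. (ashift X 2).  */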
1008238fd1498Szrj   if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
1008338fd1498Szrj       && (i = exact_log2 (constop)) >= 0)
1008438fd1498Szrj     return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
1008538fd1498Szrj 
1008638fd1498Szrj   /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
1008738fd1498Szrj      or XOR, then try to apply the distributive law.  This may eliminate
1008838fd1498Szrj      operations if either branch can be simplified because of the AND.
1008938fd1498Szrj      It may also make some cases more complex, but those cases probably
1009038fd1498Szrj      won't match a pattern either with or without this.  */
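  /* E.g. (and (ior X (const_int 12)) (const_int 10)) distributes to
     (ior (and X (const_int 10)) (const_int 8)).  */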
1009138fd1498Szrj 
1009238fd1498Szrj   if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
1009338fd1498Szrj     {
1009438fd1498Szrj       scalar_int_mode varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
1009538fd1498Szrj       return
1009638fd1498Szrj 	gen_lowpart
1009738fd1498Szrj 	  (mode,
1009838fd1498Szrj 	   apply_distributive_law
1009938fd1498Szrj 	   (simplify_gen_binary (GET_CODE (varop), varop_mode,
1010038fd1498Szrj 				 simplify_and_const_int (NULL_RTX, varop_mode,
1010138fd1498Szrj 							 XEXP (varop, 0),
1010238fd1498Szrj 							 constop),
1010338fd1498Szrj 				 simplify_and_const_int (NULL_RTX, varop_mode,
1010438fd1498Szrj 							 XEXP (varop, 1),
1010538fd1498Szrj 							 constop))));
1010638fd1498Szrj     }
1010738fd1498Szrj 
1010838fd1498Szrj   /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
1010938fd1498Szrj      the AND and see if one of the operands simplifies to zero.  If so, we
1011038fd1498Szrj      may eliminate it.  */
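  /* E.g. (and (plus X (const_int 16)) (const_int 15)) simplifies to
     (and X (const_int 15)), since adding 16 cannot change the low
     four bits.  */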
1011138fd1498Szrj 
1011238fd1498Szrj   if (GET_CODE (varop) == PLUS
1011338fd1498Szrj       && pow2p_hwi (constop + 1))
1011438fd1498Szrj     {
1011538fd1498Szrj       rtx o0, o1;
1011638fd1498Szrj 
1011738fd1498Szrj       o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
1011838fd1498Szrj       o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
1011938fd1498Szrj       if (o0 == const0_rtx)
1012038fd1498Szrj 	return o1;
1012138fd1498Szrj       if (o1 == const0_rtx)
1012238fd1498Szrj 	return o0;
1012338fd1498Szrj     }
1012438fd1498Szrj 
1012538fd1498Szrj   /* Make a SUBREG if necessary.  If we can't make it, fail.  */
1012638fd1498Szrj   varop = gen_lowpart (mode, varop);
1012738fd1498Szrj   if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
1012838fd1498Szrj     return NULL_RTX;
1012938fd1498Szrj 
1013038fd1498Szrj   /* If we are only masking insignificant bits, return VAROP.  */
1013138fd1498Szrj   if (constop == nonzero)
1013238fd1498Szrj     return varop;
1013338fd1498Szrj 
1013438fd1498Szrj   if (varop == orig_varop && constop == orig_constop)
1013538fd1498Szrj     return NULL_RTX;
1013638fd1498Szrj 
1013738fd1498Szrj   /* Otherwise, return an AND.  */
1013838fd1498Szrj   return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
1013938fd1498Szrj }
1014038fd1498Szrj 
1014138fd1498Szrj 
1014238fd1498Szrj /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
1014338fd1498Szrj    in MODE.
1014438fd1498Szrj 
1014538fd1498Szrj    Return an equivalent form, if different from X.  Otherwise, return X.  If
1014638fd1498Szrj    X is zero, we are to always construct the equivalent form.  */
1014738fd1498Szrj 
1014838fd1498Szrj static rtx
1014938fd1498Szrj simplify_and_const_int (rtx x, scalar_int_mode mode, rtx varop,
1015038fd1498Szrj 			unsigned HOST_WIDE_INT constop)
1015138fd1498Szrj {
1015238fd1498Szrj   rtx tem = simplify_and_const_int_1 (mode, varop, constop);
1015338fd1498Szrj   if (tem)
1015438fd1498Szrj     return tem;
1015538fd1498Szrj 
1015638fd1498Szrj   if (!x)
1015738fd1498Szrj     x = simplify_gen_binary (AND, GET_MODE (varop), varop,
1015838fd1498Szrj 			     gen_int_mode (constop, mode));
1015938fd1498Szrj   if (GET_MODE (x) != mode)
1016038fd1498Szrj     x = gen_lowpart (mode, x);
1016138fd1498Szrj   return x;
1016238fd1498Szrj }
1016338fd1498Szrj 
1016438fd1498Szrj /* Given a REG X of mode XMODE, compute which bits in X can be nonzero.
1016538fd1498Szrj    We don't care about bits outside of those defined in MODE.
1016638fd1498Szrj 
1016738fd1498Szrj    For most X this is simply GET_MODE_MASK (MODE), but if X is
1016838fd1498Szrj    a shift, AND, or zero_extract, we can do better.  */
1016938fd1498Szrj 
1017038fd1498Szrj static rtx
1017138fd1498Szrj reg_nonzero_bits_for_combine (const_rtx x, scalar_int_mode xmode,
1017238fd1498Szrj 			      scalar_int_mode mode,
1017338fd1498Szrj 			      unsigned HOST_WIDE_INT *nonzero)
1017438fd1498Szrj {
1017538fd1498Szrj   rtx tem;
1017638fd1498Szrj   reg_stat_type *rsp;
1017738fd1498Szrj 
1017838fd1498Szrj   /* If X is a register whose nonzero bits value is current, use it.
1017938fd1498Szrj      Otherwise, if X is a register whose value we can find, use that
1018038fd1498Szrj      value.  Otherwise, use the previously-computed global nonzero bits
1018138fd1498Szrj      for this register.  */
1018238fd1498Szrj 
1018338fd1498Szrj   rsp = &reg_stat[REGNO (x)];
1018438fd1498Szrj   if (rsp->last_set_value != 0
1018538fd1498Szrj       && (rsp->last_set_mode == mode
10186*58e805e6Szrj 	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10187*58e805e6Szrj 	      && GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
1018838fd1498Szrj 	      && GET_MODE_CLASS (mode) == MODE_INT))
1018938fd1498Szrj       && ((rsp->last_set_label >= label_tick_ebb_start
1019038fd1498Szrj 	   && rsp->last_set_label < label_tick)
1019138fd1498Szrj 	  || (rsp->last_set_label == label_tick
1019238fd1498Szrj               && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
1019338fd1498Szrj 	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
1019438fd1498Szrj 	      && REGNO (x) < reg_n_sets_max
1019538fd1498Szrj 	      && REG_N_SETS (REGNO (x)) == 1
1019638fd1498Szrj 	      && !REGNO_REG_SET_P
1019738fd1498Szrj 		  (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
1019838fd1498Szrj 		   REGNO (x)))))
1019938fd1498Szrj     {
1020038fd1498Szrj       /* Note that, even if the precision of last_set_mode is lower than that
1020138fd1498Szrj 	 of mode, record_value_for_reg invoked nonzero_bits on the register
1020238fd1498Szrj 	 with nonzero_bits_mode (because last_set_mode is necessarily integral
1020338fd1498Szrj 	 and HWI_COMPUTABLE_MODE_P in this case) so bits in nonzero_bits_mode
1020438fd1498Szrj 	 are all valid, hence in mode too since nonzero_bits_mode is defined
1020538fd1498Szrj 	 to the largest HWI_COMPUTABLE_MODE_P mode.  */
1020638fd1498Szrj       *nonzero &= rsp->last_set_nonzero_bits;
1020738fd1498Szrj       return NULL;
1020838fd1498Szrj     }
1020938fd1498Szrj 
1021038fd1498Szrj   tem = get_last_value (x);
1021138fd1498Szrj   if (tem)
1021238fd1498Szrj     {
1021338fd1498Szrj       if (SHORT_IMMEDIATES_SIGN_EXTEND)
1021438fd1498Szrj 	tem = sign_extend_short_imm (tem, xmode, GET_MODE_PRECISION (mode));
1021538fd1498Szrj 
1021638fd1498Szrj       return tem;
1021738fd1498Szrj     }
1021838fd1498Szrj 
1021938fd1498Szrj   if (nonzero_sign_valid && rsp->nonzero_bits)
1022038fd1498Szrj     {
1022138fd1498Szrj       unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
1022238fd1498Szrj 
1022338fd1498Szrj       if (GET_MODE_PRECISION (xmode) < GET_MODE_PRECISION (mode))
1022438fd1498Szrj 	/* We don't know anything about the upper bits.  */
1022538fd1498Szrj 	mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (xmode);
1022638fd1498Szrj 
1022738fd1498Szrj       *nonzero &= mask;
1022838fd1498Szrj     }
1022938fd1498Szrj 
1023038fd1498Szrj   return NULL;
1023138fd1498Szrj }
1023238fd1498Szrj 
1023338fd1498Szrj /* Given a reg X of mode XMODE, compute the number of bits at the high-order
1023438fd1498Szrj    end of X that are known to be equal to the sign bit, storing it in
1023538fd1498Szrj    *RESULT.  X will be used in mode MODE; the stored value will always be
1023638fd1498Szrj    between 1 and the number of bits in MODE.  */
1023738fd1498Szrj 
1023838fd1498Szrj static rtx
1023938fd1498Szrj reg_num_sign_bit_copies_for_combine (const_rtx x, scalar_int_mode xmode,
1024038fd1498Szrj 				     scalar_int_mode mode,
1024138fd1498Szrj 				     unsigned int *result)
1024238fd1498Szrj {
1024338fd1498Szrj   rtx tem;
1024438fd1498Szrj   reg_stat_type *rsp;
1024538fd1498Szrj 
1024638fd1498Szrj   rsp = &reg_stat[REGNO (x)];
1024738fd1498Szrj   if (rsp->last_set_value != 0
1024838fd1498Szrj       && rsp->last_set_mode == mode
1024938fd1498Szrj       && ((rsp->last_set_label >= label_tick_ebb_start
1025038fd1498Szrj 	   && rsp->last_set_label < label_tick)
1025138fd1498Szrj 	  || (rsp->last_set_label == label_tick
1025238fd1498Szrj               && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
1025338fd1498Szrj 	  || (REGNO (x) >= FIRST_PSEUDO_REGISTER
1025438fd1498Szrj 	      && REGNO (x) < reg_n_sets_max
1025538fd1498Szrj 	      && REG_N_SETS (REGNO (x)) == 1
1025638fd1498Szrj 	      && !REGNO_REG_SET_P
1025738fd1498Szrj 		  (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
1025838fd1498Szrj 		   REGNO (x)))))
1025938fd1498Szrj     {
1026038fd1498Szrj       *result = rsp->last_set_sign_bit_copies;
1026138fd1498Szrj       return NULL;
1026238fd1498Szrj     }
1026338fd1498Szrj 
1026438fd1498Szrj   tem = get_last_value (x);
1026538fd1498Szrj   if (tem != 0)
1026638fd1498Szrj     return tem;
1026738fd1498Szrj 
1026838fd1498Szrj   if (nonzero_sign_valid && rsp->sign_bit_copies != 0
1026938fd1498Szrj       && GET_MODE_PRECISION (xmode) == GET_MODE_PRECISION (mode))
1027038fd1498Szrj     *result = rsp->sign_bit_copies;
1027138fd1498Szrj 
1027238fd1498Szrj   return NULL;
1027338fd1498Szrj }
1027438fd1498Szrj 
1027538fd1498Szrj /* Return the number of "extended" bits there are in X, when interpreted
1027638fd1498Szrj    as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
1027738fd1498Szrj    unsigned quantities, this is the number of high-order zero bits.
1027838fd1498Szrj    For signed quantities, this is the number of copies of the sign bit
1027938fd1498Szrj    minus 1.  In both cases, this function returns the number of "spare"
1028038fd1498Szrj    bits.  For example, if two quantities for which this function returns
1028138fd1498Szrj    at least 1 are added, the addition is known not to overflow.
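   For example, two SImode addends whose nonzero bits all lie within
   0x7fffffff each have at least one spare bit, and their sum is at most
   0xfffffffe, which still fits in 32 bits.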
1028238fd1498Szrj 
1028338fd1498Szrj    This function will always return 0 unless called during combine, which
1028438fd1498Szrj    implies that it must be called from a define_split.  */
1028538fd1498Szrj 
1028638fd1498Szrj unsigned int
1028738fd1498Szrj extended_count (const_rtx x, machine_mode mode, int unsignedp)
1028838fd1498Szrj {
1028938fd1498Szrj   if (nonzero_sign_valid == 0)
1029038fd1498Szrj     return 0;
1029138fd1498Szrj 
1029238fd1498Szrj   scalar_int_mode int_mode;
1029338fd1498Szrj   return (unsignedp
1029438fd1498Szrj 	  ? (is_a <scalar_int_mode> (mode, &int_mode)
1029538fd1498Szrj 	     && HWI_COMPUTABLE_MODE_P (int_mode)
1029638fd1498Szrj 	     ? (unsigned int) (GET_MODE_PRECISION (int_mode) - 1
1029738fd1498Szrj 			       - floor_log2 (nonzero_bits (x, int_mode)))
1029838fd1498Szrj 	     : 0)
1029938fd1498Szrj 	  : num_sign_bit_copies (x, mode) - 1);
1030038fd1498Szrj }
1030138fd1498Szrj 
1030238fd1498Szrj /* This function is called from `simplify_shift_const' to merge two
1030338fd1498Szrj    outer operations.  Specifically, we have already found that we need
1030438fd1498Szrj    to perform operation *POP0 with constant *PCONST0 at the outermost
1030538fd1498Szrj    position.  We would now like to also perform OP1 with constant CONST1
1030638fd1498Szrj    (with *POP0 being done last).
1030738fd1498Szrj 
1030838fd1498Szrj    Return 1 if we can do the operation and update *POP0 and *PCONST0 with
1030938fd1498Szrj    the resulting operation.  *PCOMP_P is set to 1 if we would need to
1031038fd1498Szrj    complement the innermost operand, otherwise it is unchanged.
1031138fd1498Szrj 
1031238fd1498Szrj    MODE is the mode in which the operation will be done.  No bits outside
1031338fd1498Szrj    the width of this mode matter.  It is assumed that the width of this mode
1031438fd1498Szrj    is smaller than or equal to HOST_BITS_PER_WIDE_INT.
1031538fd1498Szrj 
1031638fd1498Szrj    If *POP0 or OP1 is UNKNOWN, it means no operation is required.  Only NEG, PLUS,
1031738fd1498Szrj    IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
1031838fd1498Szrj    result is simply *PCONST0.
1031938fd1498Szrj 
1032038fd1498Szrj    If the resulting operation cannot be expressed as one operation, we
1032138fd1498Szrj    return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
1032238fd1498Szrj 
1032338fd1498Szrj static int
1032438fd1498Szrj merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, machine_mode mode, int *pcomp_p)
1032538fd1498Szrj {
1032638fd1498Szrj   enum rtx_code op0 = *pop0;
1032738fd1498Szrj   HOST_WIDE_INT const0 = *pconst0;
1032838fd1498Szrj 
1032938fd1498Szrj   const0 &= GET_MODE_MASK (mode);
1033038fd1498Szrj   const1 &= GET_MODE_MASK (mode);
1033138fd1498Szrj 
1033238fd1498Szrj   /* If OP0 is an AND, clear unimportant bits in CONST1.  */
1033338fd1498Szrj   if (op0 == AND)
1033438fd1498Szrj     const1 &= const0;
1033538fd1498Szrj 
1033638fd1498Szrj   /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
1033738fd1498Szrj      if OP0 is SET.  */
1033838fd1498Szrj 
1033938fd1498Szrj   if (op1 == UNKNOWN || op0 == SET)
1034038fd1498Szrj     return 1;
1034138fd1498Szrj 
1034238fd1498Szrj   else if (op0 == UNKNOWN)
1034338fd1498Szrj     op0 = op1, const0 = const1;
1034438fd1498Szrj 
1034538fd1498Szrj   else if (op0 == op1)
1034638fd1498Szrj     {
1034738fd1498Szrj       switch (op0)
1034838fd1498Szrj 	{
1034938fd1498Szrj 	case AND:
1035038fd1498Szrj 	  const0 &= const1;
1035138fd1498Szrj 	  break;
1035238fd1498Szrj 	case IOR:
1035338fd1498Szrj 	  const0 |= const1;
1035438fd1498Szrj 	  break;
1035538fd1498Szrj 	case XOR:
1035638fd1498Szrj 	  const0 ^= const1;
1035738fd1498Szrj 	  break;
1035838fd1498Szrj 	case PLUS:
1035938fd1498Szrj 	  const0 += const1;
1036038fd1498Szrj 	  break;
1036138fd1498Szrj 	case NEG:
1036238fd1498Szrj 	  op0 = UNKNOWN;
1036338fd1498Szrj 	  break;
1036438fd1498Szrj 	default:
1036538fd1498Szrj 	  break;
1036638fd1498Szrj 	}
1036738fd1498Szrj     }
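  /* E.g. two pending AND operations with constants 0xff and 0x0f
     collapse into a single AND with 0x0f.  */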
1036838fd1498Szrj 
1036938fd1498Szrj   /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
1037038fd1498Szrj   else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
1037138fd1498Szrj     return 0;
1037238fd1498Szrj 
1037338fd1498Szrj   /* If the two constants aren't the same, we can't do anything.  The
1037438fd1498Szrj      remaining six cases can all be done.  */
1037538fd1498Szrj   else if (const0 != const1)
1037638fd1498Szrj     return 0;
1037738fd1498Szrj 
1037838fd1498Szrj   else
1037938fd1498Szrj     switch (op0)
1038038fd1498Szrj       {
1038138fd1498Szrj       case IOR:
1038238fd1498Szrj 	if (op1 == AND)
1038338fd1498Szrj 	  /* (a & b) | b == b */
1038438fd1498Szrj 	  op0 = SET;
1038538fd1498Szrj 	else /* op1 == XOR */
1038638fd1498Szrj 	  /* (a ^ b) | b == a | b */
1038738fd1498Szrj 	  {;}
1038838fd1498Szrj 	break;
1038938fd1498Szrj 
1039038fd1498Szrj       case XOR:
1039138fd1498Szrj 	if (op1 == AND)
1039238fd1498Szrj 	  /* (a & b) ^ b == (~a) & b */
1039338fd1498Szrj 	  op0 = AND, *pcomp_p = 1;
1039438fd1498Szrj 	else /* op1 == IOR */
1039538fd1498Szrj 	  /* (a | b) ^ b == a & ~b */
1039638fd1498Szrj 	  op0 = AND, const0 = ~const0;
1039738fd1498Szrj 	break;
1039838fd1498Szrj 
1039938fd1498Szrj       case AND:
1040038fd1498Szrj 	if (op1 == IOR)
1040138fd1498Szrj 	  /* (a | b) & b == b */
1040238fd1498Szrj 	  op0 = SET;
1040338fd1498Szrj 	else /* op1 == XOR */
1040438fd1498Szrj 	  /* (a ^ b) & b == (~a) & b */
1040538fd1498Szrj 	  *pcomp_p = 1;
1040638fd1498Szrj 	break;
1040738fd1498Szrj       default:
1040838fd1498Szrj 	break;
1040938fd1498Szrj       }
1041038fd1498Szrj 
1041138fd1498Szrj   /* Check for NO-OP cases.  */
1041238fd1498Szrj   const0 &= GET_MODE_MASK (mode);
1041338fd1498Szrj   if (const0 == 0
1041438fd1498Szrj       && (op0 == IOR || op0 == XOR || op0 == PLUS))
1041538fd1498Szrj     op0 = UNKNOWN;
1041638fd1498Szrj   else if (const0 == 0 && op0 == AND)
1041738fd1498Szrj     op0 = SET;
1041838fd1498Szrj   else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
1041938fd1498Szrj 	   && op0 == AND)
1042038fd1498Szrj     op0 = UNKNOWN;
1042138fd1498Szrj 
1042238fd1498Szrj   *pop0 = op0;
1042338fd1498Szrj 
1042438fd1498Szrj   /* ??? Slightly redundant with the above mask, but not entirely.
1042538fd1498Szrj      Moving this above means we'd have to sign-extend the mode mask
1042638fd1498Szrj      for the final test.  */
1042738fd1498Szrj   if (op0 != UNKNOWN && op0 != NEG)
1042838fd1498Szrj     *pconst0 = trunc_int_for_mode (const0, mode);
1042938fd1498Szrj 
1043038fd1498Szrj   return 1;
1043138fd1498Szrj }
1043238fd1498Szrj 
1043338fd1498Szrj /* A helper to simplify_shift_const_1 to determine the mode we can perform
1043438fd1498Szrj    the shift in.  The original shift operation CODE is performed on OP in
1043538fd1498Szrj    ORIG_MODE.  Return the wider mode MODE if we can perform the operation
1043638fd1498Szrj    in that mode.  Return ORIG_MODE otherwise.  We can also assume that the
1043738fd1498Szrj    result of the shift is subject to operation OUTER_CODE with operand
1043838fd1498Szrj    OUTER_CONST.  */
1043938fd1498Szrj 
1044038fd1498Szrj static scalar_int_mode
1044138fd1498Szrj try_widen_shift_mode (enum rtx_code code, rtx op, int count,
1044238fd1498Szrj 		      scalar_int_mode orig_mode, scalar_int_mode mode,
1044338fd1498Szrj 		      enum rtx_code outer_code, HOST_WIDE_INT outer_const)
1044438fd1498Szrj {
1044538fd1498Szrj   gcc_assert (GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (orig_mode));
1044638fd1498Szrj 
1044738fd1498Szrj   /* In general we can't perform the operation in a wider mode for right shifts and rotates.  */
1044838fd1498Szrj   switch (code)
1044938fd1498Szrj     {
1045038fd1498Szrj     case ASHIFTRT:
1045138fd1498Szrj       /* We can still widen if the bits brought in from the left are identical
1045238fd1498Szrj 	 to the sign bit of ORIG_MODE.  */
1045338fd1498Szrj       if (num_sign_bit_copies (op, mode)
1045438fd1498Szrj 	  > (unsigned) (GET_MODE_PRECISION (mode)
1045538fd1498Szrj 			- GET_MODE_PRECISION (orig_mode)))
1045638fd1498Szrj 	return mode;
1045738fd1498Szrj       return orig_mode;
1045838fd1498Szrj 
1045938fd1498Szrj     case LSHIFTRT:
1046038fd1498Szrj       /* Similarly here but with zero bits.  */
1046138fd1498Szrj       if (HWI_COMPUTABLE_MODE_P (mode)
1046238fd1498Szrj 	  && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
1046338fd1498Szrj 	return mode;
1046438fd1498Szrj 
1046538fd1498Szrj       /* We can also widen if the bits brought in will be masked off.  This
1046638fd1498Szrj 	 operation is performed in ORIG_MODE.  */
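      /* For instance, (lshiftrt:QI X 5) under an outer (and ... 3) keeps
	 only original bits 5 and 6 of X, so the shift may be done in a
	 wider mode: the extra bits shifted in are masked off anyway.  */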
1046738fd1498Szrj       if (outer_code == AND)
1046838fd1498Szrj 	{
1046938fd1498Szrj 	  int care_bits = low_bitmask_len (orig_mode, outer_const);
1047038fd1498Szrj 
1047138fd1498Szrj 	  if (care_bits >= 0
1047238fd1498Szrj 	      && GET_MODE_PRECISION (orig_mode) - care_bits >= count)
1047338fd1498Szrj 	    return mode;
1047438fd1498Szrj 	}
1047538fd1498Szrj       /* fall through */
1047638fd1498Szrj 
1047738fd1498Szrj     case ROTATE:
1047838fd1498Szrj       return orig_mode;
1047938fd1498Szrj 
1048038fd1498Szrj     case ROTATERT:
1048138fd1498Szrj       gcc_unreachable ();
1048238fd1498Szrj 
1048338fd1498Szrj     default:
1048438fd1498Szrj       return mode;
1048538fd1498Szrj     }
1048638fd1498Szrj }
1048738fd1498Szrj 
1048838fd1498Szrj /* Simplify a shift of VAROP by ORIG_COUNT bits.  CODE says what kind
1048938fd1498Szrj    of shift.  The result of the shift is RESULT_MODE.  Return NULL_RTX
1049038fd1498Szrj    if we cannot simplify it.  Otherwise, return a simplified value.
1049138fd1498Szrj 
1049238fd1498Szrj    The shift is normally computed in the widest mode we find in VAROP, as
1049338fd1498Szrj    long as it isn't a different number of words than RESULT_MODE.  Exceptions
1049438fd1498Szrj    are ASHIFTRT and ROTATE, which are always done in their original mode.  */
1049538fd1498Szrj 
1049638fd1498Szrj static rtx
1049738fd1498Szrj simplify_shift_const_1 (enum rtx_code code, machine_mode result_mode,
1049838fd1498Szrj 			rtx varop, int orig_count)
1049938fd1498Szrj {
1050038fd1498Szrj   enum rtx_code orig_code = code;
1050138fd1498Szrj   rtx orig_varop = varop;
1050238fd1498Szrj   int count, log2;
1050338fd1498Szrj   machine_mode mode = result_mode;
1050438fd1498Szrj   machine_mode shift_mode;
1050538fd1498Szrj   scalar_int_mode tmode, inner_mode, int_mode, int_varop_mode, int_result_mode;
1050638fd1498Szrj   /* We form (outer_op (code varop count) (outer_const)).  */
1050738fd1498Szrj   enum rtx_code outer_op = UNKNOWN;
1050838fd1498Szrj   HOST_WIDE_INT outer_const = 0;
1050938fd1498Szrj   int complement_p = 0;
1051038fd1498Szrj   rtx new_rtx, x;
1051138fd1498Szrj 
1051238fd1498Szrj   /* Make sure to truncate the "natural" shift on the way in.  We don't
1051338fd1498Szrj      want to do this inside the loop as it makes it more difficult to
1051438fd1498Szrj      combine shifts.  */
1051538fd1498Szrj   if (SHIFT_COUNT_TRUNCATED)
1051638fd1498Szrj     orig_count &= GET_MODE_UNIT_BITSIZE (mode) - 1;
1051738fd1498Szrj 
1051838fd1498Szrj   /* If we were given an invalid count, don't do anything except exactly
1051938fd1498Szrj      what was requested.  */
1052038fd1498Szrj 
1052138fd1498Szrj   if (orig_count < 0 || orig_count >= (int) GET_MODE_UNIT_PRECISION (mode))
1052238fd1498Szrj     return NULL_RTX;
1052338fd1498Szrj 
1052438fd1498Szrj   count = orig_count;
1052538fd1498Szrj 
1052638fd1498Szrj   /* Unless one of the branches of the `if' in this loop does a `continue',
1052738fd1498Szrj      we will `break' the loop after the `if'.  */
1052838fd1498Szrj 
1052938fd1498Szrj   while (count != 0)
1053038fd1498Szrj     {
1053138fd1498Szrj       /* If we have an operand of (clobber (const_int 0)), fail.  */
1053238fd1498Szrj       if (GET_CODE (varop) == CLOBBER)
1053338fd1498Szrj 	return NULL_RTX;
1053438fd1498Szrj 
1053538fd1498Szrj       /* Convert ROTATERT to ROTATE.  */
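      /* E.g. in SImode, (rotatert X 8) becomes (rotate X 24).  */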
1053638fd1498Szrj       if (code == ROTATERT)
1053738fd1498Szrj 	{
1053838fd1498Szrj 	  unsigned int bitsize = GET_MODE_UNIT_PRECISION (result_mode);
1053938fd1498Szrj 	  code = ROTATE;
1054038fd1498Szrj 	  count = bitsize - count;
1054138fd1498Szrj 	}
1054238fd1498Szrj 
1054338fd1498Szrj       shift_mode = result_mode;
1054438fd1498Szrj       if (shift_mode != mode)
1054538fd1498Szrj 	{
1054638fd1498Szrj 	  /* We only change the modes of scalar shifts.  */
1054738fd1498Szrj 	  int_mode = as_a <scalar_int_mode> (mode);
1054838fd1498Szrj 	  int_result_mode = as_a <scalar_int_mode> (result_mode);
1054938fd1498Szrj 	  shift_mode = try_widen_shift_mode (code, varop, count,
1055038fd1498Szrj 					     int_result_mode, int_mode,
1055138fd1498Szrj 					     outer_op, outer_const);
1055238fd1498Szrj 	}
1055338fd1498Szrj 
1055438fd1498Szrj       scalar_int_mode shift_unit_mode
1055538fd1498Szrj 	= as_a <scalar_int_mode> (GET_MODE_INNER (shift_mode));
1055638fd1498Szrj 
1055738fd1498Szrj       /* Handle cases where the count is greater than the size of the mode
1055838fd1498Szrj 	 minus 1.  For ASHIFT, use the size minus one as the count (this can
1055938fd1498Szrj 	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
1056038fd1498Szrj 	 take the count modulo the size.  For other shifts, the result is
1056138fd1498Szrj 	 zero.
1056238fd1498Szrj 
1056338fd1498Szrj 	 Since these shifts are being produced by the compiler by combining
1056438fd1498Szrj 	 multiple operations, each of which are defined, we know what the
1056538fd1498Szrj 	 result is supposed to be.  */
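      /* E.g. in QImode, a count of 10 becomes 7 for ASHIFTRT, 10 % 8 == 2
	 for rotates, and turns other shifts into zero (still subject to
	 any outer operation).  */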
1056638fd1498Szrj 
1056738fd1498Szrj       if (count > (GET_MODE_PRECISION (shift_unit_mode) - 1))
1056838fd1498Szrj 	{
1056938fd1498Szrj 	  if (code == ASHIFTRT)
1057038fd1498Szrj 	    count = GET_MODE_PRECISION (shift_unit_mode) - 1;
1057138fd1498Szrj 	  else if (code == ROTATE || code == ROTATERT)
1057238fd1498Szrj 	    count %= GET_MODE_PRECISION (shift_unit_mode);
1057338fd1498Szrj 	  else
1057438fd1498Szrj 	    {
1057538fd1498Szrj 	      /* We can't simply return zero because there may be an
1057638fd1498Szrj 		 outer op.  */
1057738fd1498Szrj 	      varop = const0_rtx;
1057838fd1498Szrj 	      count = 0;
1057938fd1498Szrj 	      break;
1058038fd1498Szrj 	    }
1058138fd1498Szrj 	}
1058238fd1498Szrj 
1058338fd1498Szrj       /* If we discovered we had to complement VAROP, leave.  Making a NOT
1058438fd1498Szrj 	 here would cause an infinite loop.  */
1058538fd1498Szrj       if (complement_p)
1058638fd1498Szrj 	break;
1058738fd1498Szrj 
1058838fd1498Szrj       if (shift_mode == shift_unit_mode)
1058938fd1498Szrj 	{
1059038fd1498Szrj 	  /* An arithmetic right shift of a quantity known to be -1 or 0
1059138fd1498Szrj 	     is a no-op.  */
1059238fd1498Szrj 	  if (code == ASHIFTRT
1059338fd1498Szrj 	      && (num_sign_bit_copies (varop, shift_unit_mode)
1059438fd1498Szrj 		  == GET_MODE_PRECISION (shift_unit_mode)))
1059538fd1498Szrj 	    {
1059638fd1498Szrj 	      count = 0;
1059738fd1498Szrj 	      break;
1059838fd1498Szrj 	    }
1059938fd1498Szrj 
1060038fd1498Szrj 	  /* If we are doing an arithmetic right shift and discarding all but
1060138fd1498Szrj 	     the sign bit copies, this is equivalent to doing a shift by the
1060238fd1498Szrj 	     bitsize minus one.  Convert it into that shift because it will
1060338fd1498Szrj 	     often allow other simplifications.  */
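	  /* E.g. if VAROP has 30 sign-bit copies in SImode, then
	     (ashiftrt VAROP 5) leaves only copies of the sign bit,
	     exactly like (ashiftrt VAROP 31).  */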
1060438fd1498Szrj 
1060538fd1498Szrj 	  if (code == ASHIFTRT
1060638fd1498Szrj 	      && (count + num_sign_bit_copies (varop, shift_unit_mode)
1060738fd1498Szrj 		  >= GET_MODE_PRECISION (shift_unit_mode)))
1060838fd1498Szrj 	    count = GET_MODE_PRECISION (shift_unit_mode) - 1;
1060938fd1498Szrj 
1061038fd1498Szrj 	  /* We simplify the tests below and elsewhere by converting
1061138fd1498Szrj 	     ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
1061238fd1498Szrj 	     `make_compound_operation' will convert it to an ASHIFTRT for
1061338fd1498Szrj 	     those machines (such as VAX) that don't have an LSHIFTRT.  */
1061438fd1498Szrj 	  if (code == ASHIFTRT
1061538fd1498Szrj 	      && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
1061638fd1498Szrj 	      && val_signbit_known_clear_p (shift_unit_mode,
1061738fd1498Szrj 					    nonzero_bits (varop,
1061838fd1498Szrj 							  shift_unit_mode)))
1061938fd1498Szrj 	    code = LSHIFTRT;
1062038fd1498Szrj 
1062138fd1498Szrj 	  if (((code == LSHIFTRT
1062238fd1498Szrj 		&& HWI_COMPUTABLE_MODE_P (shift_unit_mode)
1062338fd1498Szrj 		&& !(nonzero_bits (varop, shift_unit_mode) >> count))
1062438fd1498Szrj 	       || (code == ASHIFT
1062538fd1498Szrj 		   && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
1062638fd1498Szrj 		   && !((nonzero_bits (varop, shift_unit_mode) << count)
1062738fd1498Szrj 			& GET_MODE_MASK (shift_unit_mode))))
1062838fd1498Szrj 	      && !side_effects_p (varop))
1062938fd1498Szrj 	    varop = const0_rtx;
1063038fd1498Szrj 	}
1063138fd1498Szrj 
1063238fd1498Szrj       switch (GET_CODE (varop))
1063338fd1498Szrj 	{
1063438fd1498Szrj 	case SIGN_EXTEND:
1063538fd1498Szrj 	case ZERO_EXTEND:
1063638fd1498Szrj 	case SIGN_EXTRACT:
1063738fd1498Szrj 	case ZERO_EXTRACT:
1063838fd1498Szrj 	  new_rtx = expand_compound_operation (varop);
1063938fd1498Szrj 	  if (new_rtx != varop)
1064038fd1498Szrj 	    {
1064138fd1498Szrj 	      varop = new_rtx;
1064238fd1498Szrj 	      continue;
1064338fd1498Szrj 	    }
1064438fd1498Szrj 	  break;
1064538fd1498Szrj 
1064638fd1498Szrj 	case MEM:
1064738fd1498Szrj 	  /* The following rules apply only to scalars.  */
1064838fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1064938fd1498Szrj 	    break;
1065038fd1498Szrj 	  int_mode = as_a <scalar_int_mode> (mode);
1065138fd1498Szrj 
1065238fd1498Szrj 	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
1065338fd1498Szrj 	     minus the width of a smaller mode, we can do this with a
1065438fd1498Szrj 	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
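	  /* For instance, on a little-endian target,
	     (lshiftrt:SI (mem:SI ...) 16) can become a
	     (zero_extend:SI (mem:HI ...)) of the upper halfword.  */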
1065538fd1498Szrj 	  if ((code == ASHIFTRT || code == LSHIFTRT)
1065638fd1498Szrj 	      && ! mode_dependent_address_p (XEXP (varop, 0),
1065738fd1498Szrj 					     MEM_ADDR_SPACE (varop))
1065838fd1498Szrj 	      && ! MEM_VOLATILE_P (varop)
1065938fd1498Szrj 	      && (int_mode_for_size (GET_MODE_BITSIZE (int_mode) - count, 1)
1066038fd1498Szrj 		  .exists (&tmode)))
1066138fd1498Szrj 	    {
1066238fd1498Szrj 	      new_rtx = adjust_address_nv (varop, tmode,
1066338fd1498Szrj 					   BYTES_BIG_ENDIAN ? 0
1066438fd1498Szrj 					   : count / BITS_PER_UNIT);
1066538fd1498Szrj 
1066638fd1498Szrj 	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
1066738fd1498Szrj 				     : ZERO_EXTEND, int_mode, new_rtx);
1066838fd1498Szrj 	      count = 0;
1066938fd1498Szrj 	      continue;
1067038fd1498Szrj 	    }
1067138fd1498Szrj 	  break;
1067238fd1498Szrj 
1067338fd1498Szrj 	case SUBREG:
1067438fd1498Szrj 	  /* The following rules apply only to scalars.  */
1067538fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1067638fd1498Szrj 	    break;
1067738fd1498Szrj 	  int_mode = as_a <scalar_int_mode> (mode);
1067838fd1498Szrj 	  int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
1067938fd1498Szrj 
1068038fd1498Szrj 	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
1068138fd1498Szrj 	     the same number of words as what we've seen so far.  Then store
1068238fd1498Szrj 	     the widest mode in MODE.  */
1068338fd1498Szrj 	  if (subreg_lowpart_p (varop)
1068438fd1498Szrj 	      && is_int_mode (GET_MODE (SUBREG_REG (varop)), &inner_mode)
1068538fd1498Szrj 	      && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (int_varop_mode)
1068638fd1498Szrj 	      && (CEIL (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
1068738fd1498Szrj 		  == CEIL (GET_MODE_SIZE (int_mode), UNITS_PER_WORD))
1068838fd1498Szrj 	      && GET_MODE_CLASS (int_varop_mode) == MODE_INT)
1068938fd1498Szrj 	    {
1069038fd1498Szrj 	      varop = SUBREG_REG (varop);
1069138fd1498Szrj 	      if (GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (int_mode))
1069238fd1498Szrj 		mode = inner_mode;
1069338fd1498Szrj 	      continue;
1069438fd1498Szrj 	    }
1069538fd1498Szrj 	  break;
1069638fd1498Szrj 
1069738fd1498Szrj 	case MULT:
1069838fd1498Szrj 	  /* Some machines use MULT instead of ASHIFT because MULT
1069938fd1498Szrj 	     is cheaper.  But it is still better on those machines to
1070038fd1498Szrj 	     merge two shifts into one.  */
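	  /* E.g. (lshiftrt (mult X (const_int 8)) C) is rewritten here as
	     (lshiftrt (ashift X 3) C), which the nested-shift handling
	     below can then merge into a single shift plus mask.  */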
1070138fd1498Szrj 	  if (CONST_INT_P (XEXP (varop, 1))
1070238fd1498Szrj 	      && (log2 = exact_log2 (UINTVAL (XEXP (varop, 1)))) >= 0)
1070338fd1498Szrj 	    {
1070438fd1498Szrj 	      rtx log2_rtx = gen_int_shift_amount (GET_MODE (varop), log2);
1070538fd1498Szrj 	      varop = simplify_gen_binary (ASHIFT, GET_MODE (varop),
1070638fd1498Szrj 					   XEXP (varop, 0), log2_rtx);
1070738fd1498Szrj 	      continue;
1070838fd1498Szrj 	    }
1070938fd1498Szrj 	  break;
1071038fd1498Szrj 
1071138fd1498Szrj 	case UDIV:
1071238fd1498Szrj 	  /* Similar, for when divides are cheaper.  */
1071338fd1498Szrj 	  if (CONST_INT_P (XEXP (varop, 1))
1071438fd1498Szrj 	      && (log2 = exact_log2 (UINTVAL (XEXP (varop, 1)))) >= 0)
1071538fd1498Szrj 	    {
1071638fd1498Szrj 	      rtx log2_rtx = gen_int_shift_amount (GET_MODE (varop), log2);
1071738fd1498Szrj 	      varop = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
1071838fd1498Szrj 					   XEXP (varop, 0), log2_rtx);
1071938fd1498Szrj 	      continue;
1072038fd1498Szrj 	    }
1072138fd1498Szrj 	  break;
1072238fd1498Szrj 
1072338fd1498Szrj 	case ASHIFTRT:
1072438fd1498Szrj 	  /* If we are extracting just the sign bit of an arithmetic
1072538fd1498Szrj 	     right shift, that shift is not needed.  However, the sign
1072638fd1498Szrj 	     bit of a wider mode may be different from what would be
1072738fd1498Szrj 	     interpreted as the sign bit in a narrower mode, so, if
1072838fd1498Szrj 	     the result is narrower, don't discard the shift.  */
1072938fd1498Szrj 	  if (code == LSHIFTRT
1073038fd1498Szrj 	      && count == (GET_MODE_UNIT_BITSIZE (result_mode) - 1)
1073138fd1498Szrj 	      && (GET_MODE_UNIT_BITSIZE (result_mode)
1073238fd1498Szrj 		  >= GET_MODE_UNIT_BITSIZE (GET_MODE (varop))))
1073338fd1498Szrj 	    {
1073438fd1498Szrj 	      varop = XEXP (varop, 0);
1073538fd1498Szrj 	      continue;
1073638fd1498Szrj 	    }
1073738fd1498Szrj 
1073838fd1498Szrj 	  /* fall through */
1073938fd1498Szrj 
1074038fd1498Szrj 	case LSHIFTRT:
1074138fd1498Szrj 	case ASHIFT:
1074238fd1498Szrj 	case ROTATE:
1074338fd1498Szrj 	  /* The following rules apply only to scalars.  */
1074438fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1074538fd1498Szrj 	    break;
1074638fd1498Szrj 	  int_mode = as_a <scalar_int_mode> (mode);
1074738fd1498Szrj 	  int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
1074838fd1498Szrj 	  int_result_mode = as_a <scalar_int_mode> (result_mode);
1074938fd1498Szrj 
1075038fd1498Szrj 	  /* Here we have two nested shifts.  The result is usually the
1075138fd1498Szrj 	     AND of a new shift with a mask.  We compute the result below.  */
1075238fd1498Szrj 	  if (CONST_INT_P (XEXP (varop, 1))
1075338fd1498Szrj 	      && INTVAL (XEXP (varop, 1)) >= 0
1075438fd1498Szrj 	      && INTVAL (XEXP (varop, 1)) < GET_MODE_PRECISION (int_varop_mode)
1075538fd1498Szrj 	      && HWI_COMPUTABLE_MODE_P (int_result_mode)
1075638fd1498Szrj 	      && HWI_COMPUTABLE_MODE_P (int_mode))
1075738fd1498Szrj 	    {
1075838fd1498Szrj 	      enum rtx_code first_code = GET_CODE (varop);
1075938fd1498Szrj 	      unsigned int first_count = INTVAL (XEXP (varop, 1));
1076038fd1498Szrj 	      unsigned HOST_WIDE_INT mask;
1076138fd1498Szrj 	      rtx mask_rtx;
1076238fd1498Szrj 
1076338fd1498Szrj 	      /* We have one common special case.  We can't do any merging if
1076438fd1498Szrj 		 the inner code is an ASHIFTRT of a smaller mode.  However, if
1076538fd1498Szrj 		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
1076638fd1498Szrj 		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
1076738fd1498Szrj 		 we can convert it to
1076838fd1498Szrj 		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
1076938fd1498Szrj 		 This simplifies certain SIGN_EXTEND operations.  */
1077038fd1498Szrj 	      if (code == ASHIFT && first_code == ASHIFTRT
1077138fd1498Szrj 		  && count == (GET_MODE_PRECISION (int_result_mode)
1077238fd1498Szrj 			       - GET_MODE_PRECISION (int_varop_mode)))
1077338fd1498Szrj 		{
1077438fd1498Szrj 		  /* C3 has the low-order C1 bits zero.  */
1077538fd1498Szrj 
1077638fd1498Szrj 		  mask = GET_MODE_MASK (int_mode)
1077738fd1498Szrj 			 & ~((HOST_WIDE_INT_1U << first_count) - 1);
1077838fd1498Szrj 
1077938fd1498Szrj 		  varop = simplify_and_const_int (NULL_RTX, int_result_mode,
1078038fd1498Szrj 						  XEXP (varop, 0), mask);
1078138fd1498Szrj 		  varop = simplify_shift_const (NULL_RTX, ASHIFT,
1078238fd1498Szrj 						int_result_mode, varop, count);
1078338fd1498Szrj 		  count = first_count;
1078438fd1498Szrj 		  code = ASHIFTRT;
1078538fd1498Szrj 		  continue;
1078638fd1498Szrj 		}
1078738fd1498Szrj 
1078838fd1498Szrj 	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
1078938fd1498Szrj 		 than C1 high-order bits equal to the sign bit, we can convert
1079038fd1498Szrj 		 this to either an ASHIFT or an ASHIFTRT depending on the
1079138fd1498Szrj 		 two counts.
1079238fd1498Szrj 
1079338fd1498Szrj 		 We cannot do this if VAROP's mode is not SHIFT_UNIT_MODE.  */
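	      /* E.g. if FOO has 25 sign-bit copies in SImode, then
		 (ashiftrt:SI (ashift:SI FOO 24) 27) becomes
		 (ashiftrt:SI FOO 3).  */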
1079438fd1498Szrj 
1079538fd1498Szrj 	      if (code == ASHIFTRT && first_code == ASHIFT
1079638fd1498Szrj 		  && int_varop_mode == shift_unit_mode
1079738fd1498Szrj 		  && (num_sign_bit_copies (XEXP (varop, 0), shift_unit_mode)
1079838fd1498Szrj 		      > first_count))
1079938fd1498Szrj 		{
1080038fd1498Szrj 		  varop = XEXP (varop, 0);
1080138fd1498Szrj 		  count -= first_count;
1080238fd1498Szrj 		  if (count < 0)
1080338fd1498Szrj 		    {
1080438fd1498Szrj 		      count = -count;
1080538fd1498Szrj 		      code = ASHIFT;
1080638fd1498Szrj 		    }
1080738fd1498Szrj 
1080838fd1498Szrj 		  continue;
1080938fd1498Szrj 		}
1081038fd1498Szrj 
1081138fd1498Szrj 	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
1081238fd1498Szrj 		 we can only do this if FIRST_CODE is also ASHIFTRT.
1081338fd1498Szrj 
1081438fd1498Szrj 		 We can't do the case when CODE is ROTATE and FIRST_CODE is
1081538fd1498Szrj 		 ASHIFTRT.
1081638fd1498Szrj 
1081738fd1498Szrj 		 If the mode of this shift is not the mode of the outer shift,
1081838fd1498Szrj 		 we can't do this if either shift is a right shift or ROTATE.
1081938fd1498Szrj 
1082038fd1498Szrj 		 Finally, we can't do any of these if the mode is too wide
1082138fd1498Szrj 		 unless the codes are the same.
1082238fd1498Szrj 
1082338fd1498Szrj 		 Handle the case where the shift codes are the same
1082438fd1498Szrj 		 first.  */
1082538fd1498Szrj 
1082638fd1498Szrj 	      if (code == first_code)
1082738fd1498Szrj 		{
1082838fd1498Szrj 		  if (int_varop_mode != int_result_mode
1082938fd1498Szrj 		      && (code == ASHIFTRT || code == LSHIFTRT
1083038fd1498Szrj 			  || code == ROTATE))
1083138fd1498Szrj 		    break;
1083238fd1498Szrj 
1083338fd1498Szrj 		  count += first_count;
1083438fd1498Szrj 		  varop = XEXP (varop, 0);
1083538fd1498Szrj 		  continue;
1083638fd1498Szrj 		}
1083738fd1498Szrj 
1083838fd1498Szrj 	      if (code == ASHIFTRT
1083938fd1498Szrj 		  || (code == ROTATE && first_code == ASHIFTRT)
1084038fd1498Szrj 		  || GET_MODE_PRECISION (int_mode) > HOST_BITS_PER_WIDE_INT
1084138fd1498Szrj 		  || (int_varop_mode != int_result_mode
1084238fd1498Szrj 		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
1084338fd1498Szrj 			  || first_code == ROTATE
1084438fd1498Szrj 			  || code == ROTATE)))
1084538fd1498Szrj 		break;
1084638fd1498Szrj 
1084738fd1498Szrj 	      /* To compute the mask to apply after the shift, shift the
1084838fd1498Szrj 		 nonzero bits of the inner shift the same way the
1084938fd1498Szrj 		 outer shift will.  */
1085038fd1498Szrj 
1085138fd1498Szrj 	      mask_rtx = gen_int_mode (nonzero_bits (varop, int_varop_mode),
1085238fd1498Szrj 				       int_result_mode);
1085338fd1498Szrj 	      rtx count_rtx = gen_int_shift_amount (int_result_mode, count);
1085438fd1498Szrj 	      mask_rtx
1085538fd1498Szrj 		= simplify_const_binary_operation (code, int_result_mode,
1085638fd1498Szrj 						   mask_rtx, count_rtx);
1085738fd1498Szrj 
1085838fd1498Szrj 	      /* Give up if we can't compute an outer operation to use.  */
1085938fd1498Szrj 	      if (mask_rtx == 0
1086038fd1498Szrj 		  || !CONST_INT_P (mask_rtx)
1086138fd1498Szrj 		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
1086238fd1498Szrj 					INTVAL (mask_rtx),
1086338fd1498Szrj 					int_result_mode, &complement_p))
1086438fd1498Szrj 		break;
1086538fd1498Szrj 
1086638fd1498Szrj 	      /* If the shifts are in the same direction, we add the
1086738fd1498Szrj 		 counts.  Otherwise, we subtract them.  */
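	      /* E.g. (lshiftrt (lshiftrt X 2) 3) becomes a shift by 5,
		 while (ashift (lshiftrt X 4) 4) cancels to a shift by 0,
		 leaving only the outer AND mask computed above.  */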
1086838fd1498Szrj 	      if ((code == ASHIFTRT || code == LSHIFTRT)
1086938fd1498Szrj 		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
1087038fd1498Szrj 		count += first_count;
1087138fd1498Szrj 	      else
1087238fd1498Szrj 		count -= first_count;
1087338fd1498Szrj 
1087438fd1498Szrj 	      /* If COUNT is positive, the new shift is usually CODE,
1087538fd1498Szrj 		 except for the two exceptions below, in which case it is
1087638fd1498Szrj 		 FIRST_CODE.  If the count is negative, FIRST_CODE should
1087738fd1498Szrj 		 always be used.  */
1087838fd1498Szrj 	      if (count > 0
1087938fd1498Szrj 		  && ((first_code == ROTATE && code == ASHIFT)
1088038fd1498Szrj 		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
1088138fd1498Szrj 		code = first_code;
1088238fd1498Szrj 	      else if (count < 0)
1088338fd1498Szrj 		code = first_code, count = -count;
1088438fd1498Szrj 
1088538fd1498Szrj 	      varop = XEXP (varop, 0);
1088638fd1498Szrj 	      continue;
1088738fd1498Szrj 	    }
1088838fd1498Szrj 
1088938fd1498Szrj 	  /* If we have (A << B << C) for any shift, we can convert this to
1089038fd1498Szrj 	     (A << C << B).  This wins if A is a constant.  Only try this if
1089138fd1498Szrj 	     B is not a constant.  */
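	  /* E.g. (ashift (ashift (const_int 5) B) 2) becomes
	     (ashift (const_int 20) B).  */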
1089238fd1498Szrj 
1089338fd1498Szrj 	  else if (GET_CODE (varop) == code
1089438fd1498Szrj 		   && CONST_INT_P (XEXP (varop, 0))
1089538fd1498Szrj 		   && !CONST_INT_P (XEXP (varop, 1)))
1089638fd1498Szrj 	    {
1089738fd1498Szrj 	      /* For ((unsigned) (cstULL >> count)) >> cst2 we have to make
1089838fd1498Szrj 		 sure the result will be masked.  See PR70222.  */
1089938fd1498Szrj 	      if (code == LSHIFTRT
1090038fd1498Szrj 		  && int_mode != int_result_mode
1090138fd1498Szrj 		  && !merge_outer_ops (&outer_op, &outer_const, AND,
1090238fd1498Szrj 				       GET_MODE_MASK (int_result_mode)
1090338fd1498Szrj 				       >> orig_count, int_result_mode,
1090438fd1498Szrj 				       &complement_p))
1090538fd1498Szrj 		break;
1090638fd1498Szrj 	      /* For ((int) (cstLL >> count)) >> cst2 just give up.  Queuing
1090738fd1498Szrj 		 up outer sign extension (often left and right shift) is
1090838fd1498Szrj 		 hardly more efficient than the original.  See PR70429.  */
1090938fd1498Szrj 	      if (code == ASHIFTRT && int_mode != int_result_mode)
1091038fd1498Szrj 		break;
1091138fd1498Szrj 
1091238fd1498Szrj 	      rtx count_rtx = gen_int_shift_amount (int_result_mode, count);
1091338fd1498Szrj 	      rtx new_rtx = simplify_const_binary_operation (code, int_mode,
1091438fd1498Szrj 							     XEXP (varop, 0),
1091538fd1498Szrj 							     count_rtx);
1091638fd1498Szrj 	      varop = gen_rtx_fmt_ee (code, int_mode, new_rtx, XEXP (varop, 1));
1091738fd1498Szrj 	      count = 0;
1091838fd1498Szrj 	      continue;
1091938fd1498Szrj 	    }
1092038fd1498Szrj 	  break;
1092138fd1498Szrj 
1092238fd1498Szrj 	case NOT:
1092338fd1498Szrj 	  /* The following rules apply only to scalars.  */
1092438fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1092538fd1498Szrj 	    break;
1092638fd1498Szrj 
1092738fd1498Szrj 	  /* Make this fit the case below.  */
1092838fd1498Szrj 	  varop = gen_rtx_XOR (mode, XEXP (varop, 0), constm1_rtx);
1092938fd1498Szrj 	  continue;
1093038fd1498Szrj 
1093138fd1498Szrj 	case IOR:
1093238fd1498Szrj 	case AND:
1093338fd1498Szrj 	case XOR:
1093438fd1498Szrj 	  /* The following rules apply only to scalars.  */
1093538fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1093638fd1498Szrj 	    break;
1093738fd1498Szrj 	  int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
1093838fd1498Szrj 	  int_result_mode = as_a <scalar_int_mode> (result_mode);
1093938fd1498Szrj 
1094038fd1498Szrj 	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
1094138fd1498Szrj 	     with C the size of VAROP - 1 and the shift is logical if
1094238fd1498Szrj 	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
1094338fd1498Szrj 	     we have an (le X 0) operation.   If we have an arithmetic shift
1094438fd1498Szrj 	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
1094538fd1498Szrj 	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
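	  /* E.g. with STORE_FLAG_VALUE == 1,
	     (lshiftrt:SI (ior:SI (plus:SI X (const_int -1)) X) 31)
	     becomes (le:SI X (const_int 0)): the sign bit of (X - 1) | X
	     is set exactly when X <= 0.  */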
1094638fd1498Szrj 
1094738fd1498Szrj 	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
1094838fd1498Szrj 	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
1094938fd1498Szrj 	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
1095038fd1498Szrj 	      && (code == LSHIFTRT || code == ASHIFTRT)
1095138fd1498Szrj 	      && count == (GET_MODE_PRECISION (int_varop_mode) - 1)
1095238fd1498Szrj 	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
1095338fd1498Szrj 	    {
1095438fd1498Szrj 	      count = 0;
1095538fd1498Szrj 	      varop = gen_rtx_LE (int_varop_mode, XEXP (varop, 1),
1095638fd1498Szrj 				  const0_rtx);
1095738fd1498Szrj 
1095838fd1498Szrj 	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
1095938fd1498Szrj 		varop = gen_rtx_NEG (int_varop_mode, varop);
1096038fd1498Szrj 
1096138fd1498Szrj 	      continue;
1096238fd1498Szrj 	    }
1096338fd1498Szrj 
1096438fd1498Szrj 	  /* If we have (shift (logical)), move the logical to the outside
1096538fd1498Szrj 	     to allow it to possibly combine with another logical and the
1096638fd1498Szrj 	     shift to combine with another shift.  This also canonicalizes to
1096738fd1498Szrj 	     what a ZERO_EXTRACT looks like.  Also, some machines have
1096838fd1498Szrj 	     (and (shift)) insns.  */
1096938fd1498Szrj 
1097038fd1498Szrj 	  if (CONST_INT_P (XEXP (varop, 1))
1097138fd1498Szrj 	      /* We can't do this if we have (ashiftrt (xor))  and the
1097238fd1498Szrj 		 constant has its sign bit set in shift_unit_mode with
1097338fd1498Szrj 		 shift_unit_mode wider than result_mode.  */
1097438fd1498Szrj 	      && !(code == ASHIFTRT && GET_CODE (varop) == XOR
1097538fd1498Szrj 		   && int_result_mode != shift_unit_mode
1097638fd1498Szrj 		   && trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
1097738fd1498Szrj 					  shift_unit_mode) < 0)
1097838fd1498Szrj 	      && (new_rtx = simplify_const_binary_operation
1097938fd1498Szrj 		  (code, int_result_mode,
1098038fd1498Szrj 		   gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
1098138fd1498Szrj 		   gen_int_shift_amount (int_result_mode, count))) != 0
1098238fd1498Szrj 	      && CONST_INT_P (new_rtx)
1098338fd1498Szrj 	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
1098438fd1498Szrj 				  INTVAL (new_rtx), int_result_mode,
1098538fd1498Szrj 				  &complement_p))
1098638fd1498Szrj 	    {
1098738fd1498Szrj 	      varop = XEXP (varop, 0);
1098838fd1498Szrj 	      continue;
1098938fd1498Szrj 	    }
1099038fd1498Szrj 
1099138fd1498Szrj 	  /* If we can't do that, try to simplify the shift in each arm of the
1099238fd1498Szrj 	     logical expression, make a new logical expression, and apply
1099338fd1498Szrj 	     the inverse distributive law.  This also can't be done for
1099438fd1498Szrj 	     (ashiftrt (xor)) where we've widened the shift and the constant
1099538fd1498Szrj 	     changes the sign bit.  */
1099638fd1498Szrj 	  if (CONST_INT_P (XEXP (varop, 1))
1099738fd1498Szrj 	      && !(code == ASHIFTRT && GET_CODE (varop) == XOR
1099838fd1498Szrj 		   && int_result_mode != shift_unit_mode
1099938fd1498Szrj 		   && trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
1100038fd1498Szrj 					  shift_unit_mode) < 0))
1100138fd1498Szrj 	    {
1100238fd1498Szrj 	      rtx lhs = simplify_shift_const (NULL_RTX, code, shift_unit_mode,
1100338fd1498Szrj 					      XEXP (varop, 0), count);
1100438fd1498Szrj 	      rtx rhs = simplify_shift_const (NULL_RTX, code, shift_unit_mode,
1100538fd1498Szrj 					      XEXP (varop, 1), count);
1100638fd1498Szrj 
1100738fd1498Szrj 	      varop = simplify_gen_binary (GET_CODE (varop), shift_unit_mode,
1100838fd1498Szrj 					   lhs, rhs);
1100938fd1498Szrj 	      varop = apply_distributive_law (varop);
1101038fd1498Szrj 
1101138fd1498Szrj 	      count = 0;
1101238fd1498Szrj 	      continue;
1101338fd1498Szrj 	    }
1101438fd1498Szrj 	  break;
1101538fd1498Szrj 
1101638fd1498Szrj 	case EQ:
1101738fd1498Szrj 	  /* The following rules apply only to scalars.  */
1101838fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1101938fd1498Szrj 	    break;
1102038fd1498Szrj 	  int_result_mode = as_a <scalar_int_mode> (result_mode);
1102138fd1498Szrj 
1102238fd1498Szrj 	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
1102338fd1498Szrj 	     says that the sign bit can be tested, FOO has mode MODE, C is
1102438fd1498Szrj 	     GET_MODE_PRECISION (MODE) - 1, and FOO has only its low-order bit
1102538fd1498Szrj 	     that may be nonzero.  */
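	  /* E.g. on a target with STORE_FLAG_VALUE == -1, if FOO is known
	     to be 0 or 1, (lshiftrt:SI (eq:SI FOO (const_int 0)) 31)
	     becomes (xor:SI FOO (const_int 1)).  */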
1102638fd1498Szrj 	  if (code == LSHIFTRT
1102738fd1498Szrj 	      && XEXP (varop, 1) == const0_rtx
1102838fd1498Szrj 	      && GET_MODE (XEXP (varop, 0)) == int_result_mode
1102938fd1498Szrj 	      && count == (GET_MODE_PRECISION (int_result_mode) - 1)
1103038fd1498Szrj 	      && HWI_COMPUTABLE_MODE_P (int_result_mode)
1103138fd1498Szrj 	      && STORE_FLAG_VALUE == -1
1103238fd1498Szrj 	      && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1
1103338fd1498Szrj 	      && merge_outer_ops (&outer_op, &outer_const, XOR, 1,
1103438fd1498Szrj 				  int_result_mode, &complement_p))
1103538fd1498Szrj 	    {
1103638fd1498Szrj 	      varop = XEXP (varop, 0);
1103738fd1498Szrj 	      count = 0;
1103838fd1498Szrj 	      continue;
1103938fd1498Szrj 	    }
1104038fd1498Szrj 	  break;
1104138fd1498Szrj 
1104238fd1498Szrj 	case NEG:
1104338fd1498Szrj 	  /* The following rules apply only to scalars.  */
1104438fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1104538fd1498Szrj 	    break;
1104638fd1498Szrj 	  int_result_mode = as_a <scalar_int_mode> (result_mode);
1104738fd1498Szrj 
1104838fd1498Szrj 	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
1104938fd1498Szrj 	     than the number of bits in the mode is equivalent to A.  */
1105038fd1498Szrj 	  if (code == LSHIFTRT
1105138fd1498Szrj 	      && count == (GET_MODE_PRECISION (int_result_mode) - 1)
1105238fd1498Szrj 	      && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1)
1105338fd1498Szrj 	    {
1105438fd1498Szrj 	      varop = XEXP (varop, 0);
1105538fd1498Szrj 	      count = 0;
1105638fd1498Szrj 	      continue;
1105738fd1498Szrj 	    }
1105838fd1498Szrj 
1105938fd1498Szrj 	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
1106038fd1498Szrj 	     NEG outside to allow shifts to combine.  */
1106138fd1498Szrj 	  if (code == ASHIFT
1106238fd1498Szrj 	      && merge_outer_ops (&outer_op, &outer_const, NEG, 0,
1106338fd1498Szrj 				  int_result_mode, &complement_p))
1106438fd1498Szrj 	    {
1106538fd1498Szrj 	      varop = XEXP (varop, 0);
1106638fd1498Szrj 	      continue;
1106738fd1498Szrj 	    }
1106838fd1498Szrj 	  break;
1106938fd1498Szrj 
1107038fd1498Szrj 	case PLUS:
1107138fd1498Szrj 	  /* The following rules apply only to scalars.  */
1107238fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1107338fd1498Szrj 	    break;
1107438fd1498Szrj 	  int_result_mode = as_a <scalar_int_mode> (result_mode);
1107538fd1498Szrj 
1107638fd1498Szrj 	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
1107738fd1498Szrj 	     is one less than the number of bits in the mode is
1107838fd1498Szrj 	     equivalent to (xor A 1).  */
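	  /* E.g. in SImode with A known to be 0 or 1,
	     (lshiftrt (plus A (const_int -1)) 31) becomes
	     (xor A (const_int 1)).  */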
1107938fd1498Szrj 	  if (code == LSHIFTRT
1108038fd1498Szrj 	      && count == (GET_MODE_PRECISION (int_result_mode) - 1)
1108138fd1498Szrj 	      && XEXP (varop, 1) == constm1_rtx
1108238fd1498Szrj 	      && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1
1108338fd1498Szrj 	      && merge_outer_ops (&outer_op, &outer_const, XOR, 1,
1108438fd1498Szrj 				  int_result_mode, &complement_p))
1108538fd1498Szrj 	    {
1108638fd1498Szrj 	      count = 0;
1108738fd1498Szrj 	      varop = XEXP (varop, 0);
1108838fd1498Szrj 	      continue;
1108938fd1498Szrj 	    }
1109038fd1498Szrj 
1109138fd1498Szrj 	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
1109238fd1498Szrj 	     that might be nonzero in BAR are those being shifted out and those
1109338fd1498Szrj 	     bits are known zero in FOO, we can replace the PLUS with FOO.
1109438fd1498Szrj 	     Similarly in the other operand order.  This code occurs when
1109538fd1498Szrj 	     we are computing the size of a variable-size array.  */
1109638fd1498Szrj 
1109738fd1498Szrj 	  if ((code == ASHIFTRT || code == LSHIFTRT)
1109838fd1498Szrj 	      && count < HOST_BITS_PER_WIDE_INT
1109938fd1498Szrj 	      && nonzero_bits (XEXP (varop, 1), int_result_mode) >> count == 0
1110038fd1498Szrj 	      && (nonzero_bits (XEXP (varop, 1), int_result_mode)
1110138fd1498Szrj 		  & nonzero_bits (XEXP (varop, 0), int_result_mode)) == 0)
1110238fd1498Szrj 	    {
1110338fd1498Szrj 	      varop = XEXP (varop, 0);
1110438fd1498Szrj 	      continue;
1110538fd1498Szrj 	    }
1110638fd1498Szrj 	  else if ((code == ASHIFTRT || code == LSHIFTRT)
1110738fd1498Szrj 		   && count < HOST_BITS_PER_WIDE_INT
1110838fd1498Szrj 		   && HWI_COMPUTABLE_MODE_P (int_result_mode)
1110938fd1498Szrj 		   && (nonzero_bits (XEXP (varop, 0), int_result_mode)
1111038fd1498Szrj 		       >> count) == 0
1111138fd1498Szrj 		   && (nonzero_bits (XEXP (varop, 0), int_result_mode)
1111238fd1498Szrj 		       & nonzero_bits (XEXP (varop, 1), int_result_mode)) == 0)
1111338fd1498Szrj 	    {
1111438fd1498Szrj 	      varop = XEXP (varop, 1);
1111538fd1498Szrj 	      continue;
1111638fd1498Szrj 	    }
1111738fd1498Szrj 
1111838fd1498Szrj 	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
1111938fd1498Szrj 	  if (code == ASHIFT
1112038fd1498Szrj 	      && CONST_INT_P (XEXP (varop, 1))
1112138fd1498Szrj 	      && (new_rtx = simplify_const_binary_operation
1112238fd1498Szrj 		  (ASHIFT, int_result_mode,
1112338fd1498Szrj 		   gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
1112438fd1498Szrj 		   gen_int_shift_amount (int_result_mode, count))) != 0
1112538fd1498Szrj 	      && CONST_INT_P (new_rtx)
1112638fd1498Szrj 	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
1112738fd1498Szrj 				  INTVAL (new_rtx), int_result_mode,
1112838fd1498Szrj 				  &complement_p))
1112938fd1498Szrj 	    {
1113038fd1498Szrj 	      varop = XEXP (varop, 0);
1113138fd1498Szrj 	      continue;
1113238fd1498Szrj 	    }
1113338fd1498Szrj 
1113438fd1498Szrj 	  /* Check for 'PLUS signbit', which is the canonical form of 'XOR
1113538fd1498Szrj 	     signbit', and attempt to change the PLUS to an XOR and move it
1113638fd1498Szrj 	     to the outer operation, as is done above in the AND/IOR/XOR
1113738fd1498Szrj 	     case for shifts of logical expressions.  See the logical
1113838fd1498Szrj 	     handling above for the reasoning.  */
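	  /* E.g. in SImode, (lshiftrt (plus X (const_int 0x80000000)) 31)
	     becomes (xor (lshiftrt X 31) (const_int 1)), since adding the
	     sign bit just flips it.  */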
1113938fd1498Szrj 	  if (code == LSHIFTRT
1114038fd1498Szrj 	      && CONST_INT_P (XEXP (varop, 1))
1114138fd1498Szrj 	      && mode_signbit_p (int_result_mode, XEXP (varop, 1))
1114238fd1498Szrj 	      && (new_rtx = simplify_const_binary_operation
1114338fd1498Szrj 		  (code, int_result_mode,
1114438fd1498Szrj 		   gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
1114538fd1498Szrj 		   gen_int_shift_amount (int_result_mode, count))) != 0
1114638fd1498Szrj 	      && CONST_INT_P (new_rtx)
1114738fd1498Szrj 	      && merge_outer_ops (&outer_op, &outer_const, XOR,
1114838fd1498Szrj 				  INTVAL (new_rtx), int_result_mode,
1114938fd1498Szrj 				  &complement_p))
1115038fd1498Szrj 	    {
1115138fd1498Szrj 	      varop = XEXP (varop, 0);
1115238fd1498Szrj 	      continue;
1115338fd1498Szrj 	    }
1115438fd1498Szrj 
1115538fd1498Szrj 	  break;
1115638fd1498Szrj 
1115738fd1498Szrj 	case MINUS:
1115838fd1498Szrj 	  /* The following rules apply only to scalars.  */
1115938fd1498Szrj 	  if (shift_mode != shift_unit_mode)
1116038fd1498Szrj 	    break;
1116138fd1498Szrj 	  int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
1116238fd1498Szrj 
1116338fd1498Szrj 	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
1116438fd1498Szrj 	     with C the size of VAROP - 1 and the shift is logical if
1116538fd1498Szrj 	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
1116638fd1498Szrj 	     we have a (gt X 0) operation.  If the shift is arithmetic with
1116738fd1498Szrj 	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
1116838fd1498Szrj 	     we have a (neg (gt X 0)) operation.  */
1116938fd1498Szrj 
1117038fd1498Szrj 	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
1117138fd1498Szrj 	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
1117238fd1498Szrj 	      && count == (GET_MODE_PRECISION (int_varop_mode) - 1)
1117338fd1498Szrj 	      && (code == LSHIFTRT || code == ASHIFTRT)
1117438fd1498Szrj 	      && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
1117538fd1498Szrj 	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
1117638fd1498Szrj 	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
1117738fd1498Szrj 	    {
1117838fd1498Szrj 	      count = 0;
1117938fd1498Szrj 	      varop = gen_rtx_GT (int_varop_mode, XEXP (varop, 1),
1118038fd1498Szrj 				  const0_rtx);
1118138fd1498Szrj 
1118238fd1498Szrj 	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
1118338fd1498Szrj 		varop = gen_rtx_NEG (int_varop_mode, varop);
1118438fd1498Szrj 
1118538fd1498Szrj 	      continue;
1118638fd1498Szrj 	    }
1118738fd1498Szrj 	  break;
1118838fd1498Szrj 
1118938fd1498Szrj 	case TRUNCATE:
1119038fd1498Szrj 	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
1119138fd1498Szrj 	     if the truncate does not affect the value.  */
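	  /* For instance, assuming X in DImode:
	     (lshiftrt:SI (truncate:SI (lshiftrt:DI X 32)) 3) becomes
	     (truncate:SI (lshiftrt:DI X 35)).  This is safe because the
	     inner shift count (32) is at least DImode precision minus
	     SImode precision, so the shifted value already fits in SImode
	     and the truncation loses nothing.  */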
1119238fd1498Szrj 	  if (code == LSHIFTRT
1119338fd1498Szrj 	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
1119438fd1498Szrj 	      && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
1119538fd1498Szrj 	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
1119638fd1498Szrj 		  >= (GET_MODE_UNIT_PRECISION (GET_MODE (XEXP (varop, 0)))
1119738fd1498Szrj 		      - GET_MODE_UNIT_PRECISION (GET_MODE (varop)))))
1119838fd1498Szrj 	    {
1119938fd1498Szrj 	      rtx varop_inner = XEXP (varop, 0);
1120038fd1498Szrj 	      int new_count = count + INTVAL (XEXP (varop_inner, 1));
1120138fd1498Szrj 	      rtx new_count_rtx = gen_int_shift_amount (GET_MODE (varop_inner),
1120238fd1498Szrj 							new_count);
1120338fd1498Szrj 	      varop_inner = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
1120438fd1498Szrj 					      XEXP (varop_inner, 0),
1120538fd1498Szrj 					      new_count_rtx);
1120638fd1498Szrj 	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
1120738fd1498Szrj 	      count = 0;
1120838fd1498Szrj 	      continue;
1120938fd1498Szrj 	    }
1121038fd1498Szrj 	  break;
1121138fd1498Szrj 
1121238fd1498Szrj 	default:
1121338fd1498Szrj 	  break;
1121438fd1498Szrj 	}
1121538fd1498Szrj 
1121638fd1498Szrj       break;
1121738fd1498Szrj     }
1121838fd1498Szrj 
1121938fd1498Szrj   shift_mode = result_mode;
1122038fd1498Szrj   if (shift_mode != mode)
1122138fd1498Szrj     {
1122238fd1498Szrj       /* We only change the modes of scalar shifts.  */
1122338fd1498Szrj       int_mode = as_a <scalar_int_mode> (mode);
1122438fd1498Szrj       int_result_mode = as_a <scalar_int_mode> (result_mode);
1122538fd1498Szrj       shift_mode = try_widen_shift_mode (code, varop, count, int_result_mode,
1122638fd1498Szrj 					 int_mode, outer_op, outer_const);
1122738fd1498Szrj     }
1122838fd1498Szrj 
1122938fd1498Szrj   /* We have now finished analyzing the shift.  The result should be
1123038fd1498Szrj      a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
1123138fd1498Szrj      OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
1123238fd1498Szrj      to the result of the shift.  OUTER_CONST is the relevant constant,
1123338fd1498Szrj      but we must turn off all bits turned off in the shift.  */
1123438fd1498Szrj 
1123538fd1498Szrj   if (outer_op == UNKNOWN
1123638fd1498Szrj       && orig_code == code && orig_count == count
1123738fd1498Szrj       && varop == orig_varop
1123838fd1498Szrj       && shift_mode == GET_MODE (varop))
1123938fd1498Szrj     return NULL_RTX;
1124038fd1498Szrj 
1124138fd1498Szrj   /* Make a SUBREG if necessary.  If we can't make it, fail.  */
1124238fd1498Szrj   varop = gen_lowpart (shift_mode, varop);
1124338fd1498Szrj   if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
1124438fd1498Szrj     return NULL_RTX;
1124538fd1498Szrj 
1124638fd1498Szrj   /* If we have an outer operation and we just made a shift, it is
1124738fd1498Szrj      possible that we could have simplified the shift were it not
1124838fd1498Szrj      for the outer operation.  So try to do the simplification
1124938fd1498Szrj      recursively.  */
1125038fd1498Szrj 
1125138fd1498Szrj   if (outer_op != UNKNOWN)
1125238fd1498Szrj     x = simplify_shift_const_1 (code, shift_mode, varop, count);
1125338fd1498Szrj   else
1125438fd1498Szrj     x = NULL_RTX;
1125538fd1498Szrj 
1125638fd1498Szrj   if (x == NULL_RTX)
1125738fd1498Szrj     x = simplify_gen_binary (code, shift_mode, varop,
1125838fd1498Szrj 			     gen_int_shift_amount (shift_mode, count));
1125938fd1498Szrj 
1126038fd1498Szrj   /* If we were doing an LSHIFTRT in a wider mode than it was originally,
1126138fd1498Szrj      turn off all the bits that the shift would have turned off.  */
1126238fd1498Szrj   if (orig_code == LSHIFTRT && result_mode != shift_mode)
1126338fd1498Szrj     /* We only change the modes of scalar shifts.  */
1126438fd1498Szrj     x = simplify_and_const_int (NULL_RTX, as_a <scalar_int_mode> (shift_mode),
1126538fd1498Szrj 				x, GET_MODE_MASK (result_mode) >> orig_count);
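  /* For instance, if the original shift was (lshiftrt:QI X 3) but was
     carried out in SImode, the AND above masks with 0xff >> 3 == 0x1f,
     clearing the bits that a QImode logical shift would have discarded.  */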
1126638fd1498Szrj 
1126738fd1498Szrj   /* Do the remainder of the processing in RESULT_MODE.  */
1126838fd1498Szrj   x = gen_lowpart_or_truncate (result_mode, x);
1126938fd1498Szrj 
1127038fd1498Szrj   /* If COMPLEMENT_P is set, we have to complement X before doing the outer
1127138fd1498Szrj      operation.  */
1127238fd1498Szrj   if (complement_p)
1127338fd1498Szrj     x = simplify_gen_unary (NOT, result_mode, x, result_mode);
1127438fd1498Szrj 
1127538fd1498Szrj   if (outer_op != UNKNOWN)
1127638fd1498Szrj     {
1127738fd1498Szrj       int_result_mode = as_a <scalar_int_mode> (result_mode);
1127838fd1498Szrj 
1127938fd1498Szrj       if (GET_RTX_CLASS (outer_op) != RTX_UNARY
1128038fd1498Szrj 	  && GET_MODE_PRECISION (int_result_mode) < HOST_BITS_PER_WIDE_INT)
1128138fd1498Szrj 	outer_const = trunc_int_for_mode (outer_const, int_result_mode);
1128238fd1498Szrj 
1128338fd1498Szrj       if (outer_op == AND)
1128438fd1498Szrj 	x = simplify_and_const_int (NULL_RTX, int_result_mode, x, outer_const);
1128538fd1498Szrj       else if (outer_op == SET)
1128638fd1498Szrj 	{
1128738fd1498Szrj 	  /* This means that we have determined that the result is
1128838fd1498Szrj 	     equivalent to a constant.  This should be rare.  */
1128938fd1498Szrj 	  if (!side_effects_p (x))
1129038fd1498Szrj 	    x = GEN_INT (outer_const);
1129138fd1498Szrj 	}
1129238fd1498Szrj       else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
1129338fd1498Szrj 	x = simplify_gen_unary (outer_op, int_result_mode, x, int_result_mode);
1129438fd1498Szrj       else
1129538fd1498Szrj 	x = simplify_gen_binary (outer_op, int_result_mode, x,
1129638fd1498Szrj 				 GEN_INT (outer_const));
1129738fd1498Szrj     }
1129838fd1498Szrj 
1129938fd1498Szrj   return x;
1130038fd1498Szrj }
1130138fd1498Szrj 
1130238fd1498Szrj /* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
1130338fd1498Szrj    The result of the shift is RESULT_MODE.  If we cannot simplify it,
1130438fd1498Szrj    return X or, if it is NULL, synthesize the expression with
1130538fd1498Szrj    simplify_gen_binary.  Otherwise, return a simplified value.
1130638fd1498Szrj 
1130738fd1498Szrj    The shift is normally computed in the widest mode we find in VAROP, as
1130838fd1498Szrj    long as it isn't a different number of words than RESULT_MODE.  Exceptions
1130938fd1498Szrj    are ASHIFTRT and ROTATE, which are always done in their original mode.  */
1131038fd1498Szrj 
1131138fd1498Szrj static rtx
1131238fd1498Szrj simplify_shift_const (rtx x, enum rtx_code code, machine_mode result_mode,
1131338fd1498Szrj 		      rtx varop, int count)
1131438fd1498Szrj {
1131538fd1498Szrj   rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
1131638fd1498Szrj   if (tem)
1131738fd1498Szrj     return tem;
1131838fd1498Szrj 
1131938fd1498Szrj   if (!x)
1132038fd1498Szrj     x = simplify_gen_binary (code, GET_MODE (varop), varop,
1132138fd1498Szrj 			     gen_int_shift_amount (GET_MODE (varop), count));
1132238fd1498Szrj   if (GET_MODE (x) != result_mode)
1132338fd1498Szrj     x = gen_lowpart (result_mode, x);
1132438fd1498Szrj   return x;
1132538fd1498Szrj }
1132638fd1498Szrj 
1132738fd1498Szrj 
1132838fd1498Szrj /* A subroutine of recog_for_combine.  See there for arguments and
1132938fd1498Szrj    return value.  */
1133038fd1498Szrj 
1133138fd1498Szrj static int
1133238fd1498Szrj recog_for_combine_1 (rtx *pnewpat, rtx_insn *insn, rtx *pnotes)
1133338fd1498Szrj {
1133438fd1498Szrj   rtx pat = *pnewpat;
1133538fd1498Szrj   rtx pat_without_clobbers;
1133638fd1498Szrj   int insn_code_number;
1133738fd1498Szrj   int num_clobbers_to_add = 0;
1133838fd1498Szrj   int i;
1133938fd1498Szrj   rtx notes = NULL_RTX;
1134038fd1498Szrj   rtx old_notes, old_pat;
1134138fd1498Szrj   int old_icode;
1134238fd1498Szrj 
1134338fd1498Szrj   /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
1134438fd1498Szrj      we use to indicate that something didn't match.  If we find such a
1134538fd1498Szrj      thing, force rejection.  */
1134638fd1498Szrj   if (GET_CODE (pat) == PARALLEL)
1134738fd1498Szrj     for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
1134838fd1498Szrj       if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
1134938fd1498Szrj 	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
1135038fd1498Szrj 	return -1;
1135138fd1498Szrj 
1135238fd1498Szrj   old_pat = PATTERN (insn);
1135338fd1498Szrj   old_notes = REG_NOTES (insn);
1135438fd1498Szrj   PATTERN (insn) = pat;
1135538fd1498Szrj   REG_NOTES (insn) = NULL_RTX;
1135638fd1498Szrj 
1135738fd1498Szrj   insn_code_number = recog (pat, insn, &num_clobbers_to_add);
1135838fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
1135938fd1498Szrj     {
1136038fd1498Szrj       if (insn_code_number < 0)
1136138fd1498Szrj 	fputs ("Failed to match this instruction:\n", dump_file);
1136238fd1498Szrj       else
1136338fd1498Szrj 	fputs ("Successfully matched this instruction:\n", dump_file);
1136438fd1498Szrj       print_rtl_single (dump_file, pat);
1136538fd1498Szrj     }
1136638fd1498Szrj 
1136738fd1498Szrj   /* If the pattern didn't match, there is the possibility that we
1136838fd1498Szrj      previously had an insn that clobbered some register as a side effect,
1136938fd1498Szrj      but the combined insn doesn't need to do that.  So try once more
1137038fd1498Szrj      without the clobbers, unless this represents an ASM insn.  */
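  /* For instance, a candidate
	(parallel [(set (reg:SI 0) (plus:SI ...)) (clobber (scratch:SI))])
     that fails to match can be retried below as the bare SET, in case the
     target has a form of the insn without the clobber (the operands shown
     are illustrative only).  */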
1137138fd1498Szrj 
1137238fd1498Szrj   if (insn_code_number < 0 && ! check_asm_operands (pat)
1137338fd1498Szrj       && GET_CODE (pat) == PARALLEL)
1137438fd1498Szrj     {
1137538fd1498Szrj       int pos;
1137638fd1498Szrj 
1137738fd1498Szrj       for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
1137838fd1498Szrj 	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
1137938fd1498Szrj 	  {
1138038fd1498Szrj 	    if (i != pos)
1138138fd1498Szrj 	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
1138238fd1498Szrj 	    pos++;
1138338fd1498Szrj 	  }
1138438fd1498Szrj 
1138538fd1498Szrj       SUBST_INT (XVECLEN (pat, 0), pos);
1138638fd1498Szrj 
1138738fd1498Szrj       if (pos == 1)
1138838fd1498Szrj 	pat = XVECEXP (pat, 0, 0);
1138938fd1498Szrj 
1139038fd1498Szrj       PATTERN (insn) = pat;
1139138fd1498Szrj       insn_code_number = recog (pat, insn, &num_clobbers_to_add);
1139238fd1498Szrj       if (dump_file && (dump_flags & TDF_DETAILS))
1139338fd1498Szrj 	{
1139438fd1498Szrj 	  if (insn_code_number < 0)
1139538fd1498Szrj 	    fputs ("Failed to match this instruction:\n", dump_file);
1139638fd1498Szrj 	  else
1139738fd1498Szrj 	    fputs ("Successfully matched this instruction:\n", dump_file);
1139838fd1498Szrj 	  print_rtl_single (dump_file, pat);
1139938fd1498Szrj 	}
1140038fd1498Szrj     }
1140138fd1498Szrj 
1140238fd1498Szrj   pat_without_clobbers = pat;
1140338fd1498Szrj 
1140438fd1498Szrj   PATTERN (insn) = old_pat;
1140538fd1498Szrj   REG_NOTES (insn) = old_notes;
1140638fd1498Szrj 
1140738fd1498Szrj   /* Recognize all noop sets; these will be killed by a followup pass.  */
1140838fd1498Szrj   if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
1140938fd1498Szrj     insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
1141038fd1498Szrj 
1141138fd1498Szrj   /* If we had any clobbers to add, make a new pattern that contains
1141238fd1498Szrj      them.  Then check to make sure that all of them are dead.  */
1141338fd1498Szrj   if (num_clobbers_to_add)
1141438fd1498Szrj     {
1141538fd1498Szrj       rtx newpat = gen_rtx_PARALLEL (VOIDmode,
1141638fd1498Szrj 				     rtvec_alloc (GET_CODE (pat) == PARALLEL
1141738fd1498Szrj 						  ? (XVECLEN (pat, 0)
1141838fd1498Szrj 						     + num_clobbers_to_add)
1141938fd1498Szrj 						  : num_clobbers_to_add + 1));
1142038fd1498Szrj 
1142138fd1498Szrj       if (GET_CODE (pat) == PARALLEL)
1142238fd1498Szrj 	for (i = 0; i < XVECLEN (pat, 0); i++)
1142338fd1498Szrj 	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
1142438fd1498Szrj       else
1142538fd1498Szrj 	XVECEXP (newpat, 0, 0) = pat;
1142638fd1498Szrj 
1142738fd1498Szrj       add_clobbers (newpat, insn_code_number);
1142838fd1498Szrj 
1142938fd1498Szrj       for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
1143038fd1498Szrj 	   i < XVECLEN (newpat, 0); i++)
1143138fd1498Szrj 	{
1143238fd1498Szrj 	  if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
1143338fd1498Szrj 	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
1143438fd1498Szrj 	    return -1;
1143538fd1498Szrj 	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
1143638fd1498Szrj 	    {
1143738fd1498Szrj 	      gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
1143838fd1498Szrj 	      notes = alloc_reg_note (REG_UNUSED,
1143938fd1498Szrj 				      XEXP (XVECEXP (newpat, 0, i), 0), notes);
1144038fd1498Szrj 	    }
1144138fd1498Szrj 	}
1144238fd1498Szrj       pat = newpat;
1144338fd1498Szrj     }
1144438fd1498Szrj 
1144538fd1498Szrj   if (insn_code_number >= 0
1144638fd1498Szrj       && insn_code_number != NOOP_MOVE_INSN_CODE)
1144738fd1498Szrj     {
1144838fd1498Szrj       old_pat = PATTERN (insn);
1144938fd1498Szrj       old_notes = REG_NOTES (insn);
1145038fd1498Szrj       old_icode = INSN_CODE (insn);
1145138fd1498Szrj       PATTERN (insn) = pat;
1145238fd1498Szrj       REG_NOTES (insn) = notes;
1145338fd1498Szrj       INSN_CODE (insn) = insn_code_number;
1145438fd1498Szrj 
1145538fd1498Szrj       /* Allow targets to reject combined insn.  */
1145638fd1498Szrj       if (!targetm.legitimate_combined_insn (insn))
1145738fd1498Szrj 	{
1145838fd1498Szrj 	  if (dump_file && (dump_flags & TDF_DETAILS))
1145938fd1498Szrj 	    fputs ("Instruction not appropriate for target.",
1146038fd1498Szrj 		   dump_file);
1146138fd1498Szrj 
1146238fd1498Szrj 	  /* Callers expect recog_for_combine to strip
1146338fd1498Szrj 	     clobbers from the pattern on failure.  */
1146438fd1498Szrj 	  pat = pat_without_clobbers;
1146538fd1498Szrj 	  notes = NULL_RTX;
1146638fd1498Szrj 
1146738fd1498Szrj 	  insn_code_number = -1;
1146838fd1498Szrj 	}
1146938fd1498Szrj 
1147038fd1498Szrj       PATTERN (insn) = old_pat;
1147138fd1498Szrj       REG_NOTES (insn) = old_notes;
1147238fd1498Szrj       INSN_CODE (insn) = old_icode;
1147338fd1498Szrj     }
1147438fd1498Szrj 
1147538fd1498Szrj   *pnewpat = pat;
1147638fd1498Szrj   *pnotes = notes;
1147738fd1498Szrj 
1147838fd1498Szrj   return insn_code_number;
1147938fd1498Szrj }
1148038fd1498Szrj 
1148138fd1498Szrj /* Change every ZERO_EXTRACT and ZERO_EXTEND of a SUBREG that can be
1148238fd1498Szrj    expressed as an AND and maybe an LSHIFTRT, to that formulation.
1148338fd1498Szrj    Return whether anything was so changed.  */
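/* For instance, with BITS_BIG_ENDIAN clear, (zero_extract:SI (reg:SI X) 8 16)
   can become (and:SI (lshiftrt:SI (reg:SI X) 16) (const_int 255)), and
   (zero_extend:SI (subreg:QI (reg:SI Y) 0)) can become
   (and:SI (reg:SI Y) (const_int 255)).  */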
1148438fd1498Szrj 
1148538fd1498Szrj static bool
1148638fd1498Szrj change_zero_ext (rtx pat)
1148738fd1498Szrj {
1148838fd1498Szrj   bool changed = false;
1148938fd1498Szrj   rtx *src = &SET_SRC (pat);
1149038fd1498Szrj 
1149138fd1498Szrj   subrtx_ptr_iterator::array_type array;
1149238fd1498Szrj   FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
1149338fd1498Szrj     {
1149438fd1498Szrj       rtx x = **iter;
1149538fd1498Szrj       scalar_int_mode mode, inner_mode;
1149638fd1498Szrj       if (!is_a <scalar_int_mode> (GET_MODE (x), &mode))
1149738fd1498Szrj 	continue;
1149838fd1498Szrj       int size;
1149938fd1498Szrj 
1150038fd1498Szrj       if (GET_CODE (x) == ZERO_EXTRACT
1150138fd1498Szrj 	  && CONST_INT_P (XEXP (x, 1))
1150238fd1498Szrj 	  && CONST_INT_P (XEXP (x, 2))
1150338fd1498Szrj 	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode)
1150438fd1498Szrj 	  && GET_MODE_PRECISION (inner_mode) <= GET_MODE_PRECISION (mode))
1150538fd1498Szrj 	{
1150638fd1498Szrj 	  size = INTVAL (XEXP (x, 1));
1150738fd1498Szrj 
1150838fd1498Szrj 	  int start = INTVAL (XEXP (x, 2));
1150938fd1498Szrj 	  if (BITS_BIG_ENDIAN)
1151038fd1498Szrj 	    start = GET_MODE_PRECISION (inner_mode) - size - start;
1151138fd1498Szrj 
1151238fd1498Szrj 	  if (start != 0)
1151338fd1498Szrj 	    x = gen_rtx_LSHIFTRT (inner_mode, XEXP (x, 0),
1151438fd1498Szrj 				  gen_int_shift_amount (inner_mode, start));
1151538fd1498Szrj 	  else
1151638fd1498Szrj 	    x = XEXP (x, 0);
1151738fd1498Szrj 
1151838fd1498Szrj 	  if (mode != inner_mode)
1151938fd1498Szrj 	    {
1152038fd1498Szrj 	      if (REG_P (x) && HARD_REGISTER_P (x)
1152138fd1498Szrj 		  && !can_change_dest_mode (x, 0, mode))
1152238fd1498Szrj 		continue;
1152338fd1498Szrj 
1152438fd1498Szrj 	      x = gen_lowpart_SUBREG (mode, x);
1152538fd1498Szrj 	    }
1152638fd1498Szrj 	}
1152738fd1498Szrj       else if (GET_CODE (x) == ZERO_EXTEND
1152838fd1498Szrj 	       && GET_CODE (XEXP (x, 0)) == SUBREG
1152938fd1498Szrj 	       && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (XEXP (x, 0))))
1153038fd1498Szrj 	       && !paradoxical_subreg_p (XEXP (x, 0))
1153138fd1498Szrj 	       && subreg_lowpart_p (XEXP (x, 0)))
1153238fd1498Szrj 	{
1153338fd1498Szrj 	  inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
1153438fd1498Szrj 	  size = GET_MODE_PRECISION (inner_mode);
1153538fd1498Szrj 	  x = SUBREG_REG (XEXP (x, 0));
1153638fd1498Szrj 	  if (GET_MODE (x) != mode)
1153738fd1498Szrj 	    {
1153838fd1498Szrj 	      if (REG_P (x) && HARD_REGISTER_P (x)
1153938fd1498Szrj 		  && !can_change_dest_mode (x, 0, mode))
1154038fd1498Szrj 		continue;
1154138fd1498Szrj 
1154238fd1498Szrj 	      x = gen_lowpart_SUBREG (mode, x);
1154338fd1498Szrj 	    }
1154438fd1498Szrj 	}
1154538fd1498Szrj       else if (GET_CODE (x) == ZERO_EXTEND
1154638fd1498Szrj 	       && REG_P (XEXP (x, 0))
1154738fd1498Szrj 	       && HARD_REGISTER_P (XEXP (x, 0))
1154838fd1498Szrj 	       && can_change_dest_mode (XEXP (x, 0), 0, mode))
1154938fd1498Szrj 	{
1155038fd1498Szrj 	  inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
1155138fd1498Szrj 	  size = GET_MODE_PRECISION (inner_mode);
1155238fd1498Szrj 	  x = gen_rtx_REG (mode, REGNO (XEXP (x, 0)));
1155338fd1498Szrj 	}
1155438fd1498Szrj       else
1155538fd1498Szrj 	continue;
1155638fd1498Szrj 
1155738fd1498Szrj       if (!(GET_CODE (x) == LSHIFTRT
1155838fd1498Szrj 	    && CONST_INT_P (XEXP (x, 1))
1155938fd1498Szrj 	    && size + INTVAL (XEXP (x, 1)) == GET_MODE_PRECISION (mode)))
1156038fd1498Szrj 	{
1156138fd1498Szrj 	  wide_int mask = wi::mask (size, false, GET_MODE_PRECISION (mode));
1156238fd1498Szrj 	  x = gen_rtx_AND (mode, x, immed_wide_int_const (mask, mode));
1156338fd1498Szrj 	}
1156438fd1498Szrj 
1156538fd1498Szrj       SUBST (**iter, x);
1156638fd1498Szrj       changed = true;
1156738fd1498Szrj     }
1156838fd1498Szrj 
1156938fd1498Szrj   if (changed)
1157038fd1498Szrj     FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
1157138fd1498Szrj       maybe_swap_commutative_operands (**iter);
1157238fd1498Szrj 
1157338fd1498Szrj   rtx *dst = &SET_DEST (pat);
1157438fd1498Szrj   scalar_int_mode mode;
1157538fd1498Szrj   if (GET_CODE (*dst) == ZERO_EXTRACT
1157638fd1498Szrj       && REG_P (XEXP (*dst, 0))
1157738fd1498Szrj       && is_a <scalar_int_mode> (GET_MODE (XEXP (*dst, 0)), &mode)
1157838fd1498Szrj       && CONST_INT_P (XEXP (*dst, 1))
1157938fd1498Szrj       && CONST_INT_P (XEXP (*dst, 2)))
1158038fd1498Szrj     {
1158138fd1498Szrj       rtx reg = XEXP (*dst, 0);
1158238fd1498Szrj       int width = INTVAL (XEXP (*dst, 1));
1158338fd1498Szrj       int offset = INTVAL (XEXP (*dst, 2));
1158438fd1498Szrj       int reg_width = GET_MODE_PRECISION (mode);
1158538fd1498Szrj       if (BITS_BIG_ENDIAN)
1158638fd1498Szrj 	offset = reg_width - width - offset;
1158738fd1498Szrj 
1158838fd1498Szrj       rtx x, y, z, w;
1158938fd1498Szrj       wide_int mask = wi::shifted_mask (offset, width, true, reg_width);
1159038fd1498Szrj       wide_int mask2 = wi::shifted_mask (offset, width, false, reg_width);
1159138fd1498Szrj       x = gen_rtx_AND (mode, reg, immed_wide_int_const (mask, mode));
1159238fd1498Szrj       if (offset)
1159338fd1498Szrj 	y = gen_rtx_ASHIFT (mode, SET_SRC (pat), GEN_INT (offset));
1159438fd1498Szrj       else
1159538fd1498Szrj 	y = SET_SRC (pat);
1159638fd1498Szrj       z = gen_rtx_AND (mode, y, immed_wide_int_const (mask2, mode));
1159738fd1498Szrj       w = gen_rtx_IOR (mode, x, z);
1159838fd1498Szrj       SUBST (SET_DEST (pat), reg);
1159938fd1498Szrj       SUBST (SET_SRC (pat), w);
1160038fd1498Szrj 
1160138fd1498Szrj       changed = true;
1160238fd1498Szrj     }
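  /* For instance, assuming a 32-bit register R and BITS_BIG_ENDIAN clear,
     (set (zero_extract:SI R 8 8) SRC) is rewritten above as
     (set R (ior (and R ~0xff00) (and (ashift SRC 8) 0xff00))),
     i.e. a plain read-modify-write of bits 8..15.  */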
1160338fd1498Szrj 
1160438fd1498Szrj   return changed;
1160538fd1498Szrj }
1160638fd1498Szrj 
1160738fd1498Szrj /* Like recog, but we receive the address of a pointer to a new pattern.
1160838fd1498Szrj    We try to match the rtx that the pointer points to.
1160938fd1498Szrj    If that fails, we may try to modify or replace the pattern,
1161038fd1498Szrj    storing the replacement into the same pointer object.
1161138fd1498Szrj 
1161238fd1498Szrj    Modifications include deletion or addition of CLOBBERs.  If the
1161338fd1498Szrj    instruction will still not match, we change ZERO_EXTEND and ZERO_EXTRACT
1161438fd1498Szrj    to the equivalent AND and perhaps LSHIFTRT patterns, and try with that
1161538fd1498Szrj    (and undo if that fails).
1161638fd1498Szrj 
1161738fd1498Szrj    PNOTES is a pointer to a location where any REG_UNUSED notes added for
1161838fd1498Szrj    the CLOBBERs are placed.
1161938fd1498Szrj 
1162038fd1498Szrj    The value is the final insn code from the pattern ultimately matched,
1162138fd1498Szrj    or -1.  */
1162238fd1498Szrj 
1162338fd1498Szrj static int
1162438fd1498Szrj recog_for_combine (rtx *pnewpat, rtx_insn *insn, rtx *pnotes)
1162538fd1498Szrj {
1162638fd1498Szrj   rtx pat = *pnewpat;
1162738fd1498Szrj   int insn_code_number = recog_for_combine_1 (pnewpat, insn, pnotes);
1162838fd1498Szrj   if (insn_code_number >= 0 || check_asm_operands (pat))
1162938fd1498Szrj     return insn_code_number;
1163038fd1498Szrj 
1163138fd1498Szrj   void *marker = get_undo_marker ();
1163238fd1498Szrj   bool changed = false;
1163338fd1498Szrj 
1163438fd1498Szrj   if (GET_CODE (pat) == SET)
1163538fd1498Szrj     changed = change_zero_ext (pat);
1163638fd1498Szrj   else if (GET_CODE (pat) == PARALLEL)
1163738fd1498Szrj     {
1163838fd1498Szrj       int i;
1163938fd1498Szrj       for (i = 0; i < XVECLEN (pat, 0); i++)
1164038fd1498Szrj 	{
1164138fd1498Szrj 	  rtx set = XVECEXP (pat, 0, i);
1164238fd1498Szrj 	  if (GET_CODE (set) == SET)
1164338fd1498Szrj 	    changed |= change_zero_ext (set);
1164438fd1498Szrj 	}
1164538fd1498Szrj     }
1164638fd1498Szrj 
1164738fd1498Szrj   if (changed)
1164838fd1498Szrj     {
1164938fd1498Szrj       insn_code_number = recog_for_combine_1 (pnewpat, insn, pnotes);
1165038fd1498Szrj 
1165138fd1498Szrj       if (insn_code_number < 0)
1165238fd1498Szrj 	undo_to_marker (marker);
1165338fd1498Szrj     }
1165438fd1498Szrj 
1165538fd1498Szrj   return insn_code_number;
1165638fd1498Szrj }
1165738fd1498Szrj 
1165838fd1498Szrj /* Like gen_lowpart_general but for use by combine.  In combine it
1165938fd1498Szrj    is not possible to create any new pseudoregs.  However, it is
1166038fd1498Szrj    safe to create invalid memory addresses, because combine will
1166138fd1498Szrj    try to recognize them and all they will do is make the combine
1166238fd1498Szrj    attempt fail.
1166338fd1498Szrj 
1166438fd1498Szrj    If for some reason this cannot do its job, an rtx
1166538fd1498Szrj    (clobber (const_int 0)) is returned.
1166638fd1498Szrj    An insn containing that will not be recognized.  */
1166738fd1498Szrj 
1166838fd1498Szrj static rtx
1166938fd1498Szrj gen_lowpart_for_combine (machine_mode omode, rtx x)
1167038fd1498Szrj {
1167138fd1498Szrj   machine_mode imode = GET_MODE (x);
1167238fd1498Szrj   rtx result;
1167338fd1498Szrj 
1167438fd1498Szrj   if (omode == imode)
1167538fd1498Szrj     return x;
1167638fd1498Szrj 
1167738fd1498Szrj   /* We can only support MODE being wider than a word if X is a
1167838fd1498Szrj      constant integer or has a mode the same size.  */
1167938fd1498Szrj   if (maybe_gt (GET_MODE_SIZE (omode), UNITS_PER_WORD)
1168038fd1498Szrj       && ! (CONST_SCALAR_INT_P (x)
1168138fd1498Szrj 	    || known_eq (GET_MODE_SIZE (imode), GET_MODE_SIZE (omode))))
1168238fd1498Szrj     goto fail;
1168338fd1498Szrj 
1168438fd1498Szrj   /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
1168538fd1498Szrj      won't know what to do.  So we will strip off the SUBREG here and
1168638fd1498Szrj      process normally.  */
1168738fd1498Szrj   if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
1168838fd1498Szrj     {
1168938fd1498Szrj       x = SUBREG_REG (x);
1169038fd1498Szrj 
1169138fd1498Szrj       /* For use in case we fall down into the address adjustments
1169238fd1498Szrj 	 further below, we need to adjust the known mode of x (imode),
1169338fd1498Szrj 	 since we just changed x.  */
1169438fd1498Szrj       imode = GET_MODE (x);
1169538fd1498Szrj 
1169638fd1498Szrj       if (imode == omode)
1169738fd1498Szrj 	return x;
1169838fd1498Szrj     }
1169938fd1498Szrj 
1170038fd1498Szrj   result = gen_lowpart_common (omode, x);
1170138fd1498Szrj 
1170238fd1498Szrj   if (result)
1170338fd1498Szrj     return result;
1170438fd1498Szrj 
1170538fd1498Szrj   if (MEM_P (x))
1170638fd1498Szrj     {
1170738fd1498Szrj       /* Refuse to work on a volatile memory ref or one with a mode-dependent
1170838fd1498Szrj 	 address.  */
1170938fd1498Szrj       if (MEM_VOLATILE_P (x)
1171038fd1498Szrj 	  || mode_dependent_address_p (XEXP (x, 0), MEM_ADDR_SPACE (x)))
1171138fd1498Szrj 	goto fail;
1171238fd1498Szrj 
1171338fd1498Szrj       /* If we want to refer to something bigger than the original memref,
1171438fd1498Szrj 	 generate a paradoxical subreg instead.  That will force a reload
1171538fd1498Szrj 	 of the original memref X.  */
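      /* For instance, a request for DImode from (mem:SI addr) yields
	 (subreg:DI (mem:SI addr) 0) rather than widening the memory
	 reference itself.  */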
1171638fd1498Szrj       if (paradoxical_subreg_p (omode, imode))
1171738fd1498Szrj 	return gen_rtx_SUBREG (omode, x, 0);
1171838fd1498Szrj 
1171938fd1498Szrj       poly_int64 offset = byte_lowpart_offset (omode, imode);
1172038fd1498Szrj       return adjust_address_nv (x, omode, offset);
1172138fd1498Szrj     }
1172238fd1498Szrj 
1172338fd1498Szrj   /* If X is a comparison operator, rewrite it in a new mode.  This
1172438fd1498Szrj      probably won't match, but may allow further simplifications.  */
1172538fd1498Szrj   else if (COMPARISON_P (x))
1172638fd1498Szrj     return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
1172738fd1498Szrj 
1172838fd1498Szrj   /* If we couldn't simplify X any other way, just enclose it in a
1172938fd1498Szrj      SUBREG.  Normally, this SUBREG won't match, but some patterns may
1173038fd1498Szrj      include an explicit SUBREG or we may simplify it further in combine.  */
1173138fd1498Szrj   else
1173238fd1498Szrj     {
1173338fd1498Szrj       rtx res;
1173438fd1498Szrj 
1173538fd1498Szrj       if (imode == VOIDmode)
1173638fd1498Szrj 	{
1173738fd1498Szrj 	  imode = int_mode_for_mode (omode).require ();
1173838fd1498Szrj 	  x = gen_lowpart_common (imode, x);
1173938fd1498Szrj 	  if (x == NULL)
1174038fd1498Szrj 	    goto fail;
1174138fd1498Szrj 	}
1174238fd1498Szrj       res = lowpart_subreg (omode, x, imode);
1174338fd1498Szrj       if (res)
1174438fd1498Szrj 	return res;
1174538fd1498Szrj     }
1174638fd1498Szrj 
1174738fd1498Szrj  fail:
1174838fd1498Szrj   return gen_rtx_CLOBBER (omode, const0_rtx);
1174938fd1498Szrj }
1175038fd1498Szrj 
1175138fd1498Szrj /* Try to simplify a comparison between OP0 and a constant OP1,
1175238fd1498Szrj    where CODE is the comparison code that will be tested, into a
1175338fd1498Szrj    (CODE OP0 const0_rtx) form.
1175438fd1498Szrj 
1175538fd1498Szrj    The result is a possibly different comparison code to use.
1175638fd1498Szrj    *POP1 may be updated.  */
1175738fd1498Szrj 
1175838fd1498Szrj static enum rtx_code
1175938fd1498Szrj simplify_compare_const (enum rtx_code code, machine_mode mode,
1176038fd1498Szrj 			rtx op0, rtx *pop1)
1176138fd1498Szrj {
1176238fd1498Szrj   scalar_int_mode int_mode;
1176338fd1498Szrj   HOST_WIDE_INT const_op = INTVAL (*pop1);
1176438fd1498Szrj 
1176538fd1498Szrj   /* Get the constant we are comparing against and reduce it to fit
1176638fd1498Szrj      our mode.  */
1176738fd1498Szrj   if (mode != VOIDmode)
1176838fd1498Szrj     const_op = trunc_int_for_mode (const_op, mode);
1176938fd1498Szrj 
1177038fd1498Szrj   /* If we are comparing against a constant power of two and the value
1177138fd1498Szrj      being compared can only have that single bit nonzero (e.g., it was
1177238fd1498Szrj      `and'ed with that bit), we can replace this with a comparison
1177338fd1498Szrj      with zero.  */
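  /* For instance, if op0 is (and:SI X 8), so that bit 3 is its only
     possibly nonzero bit, (ge (and:SI X 8) 8) can be rewritten as
     (ne (and:SI X 8) 0).  */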
1177438fd1498Szrj   if (const_op
1177538fd1498Szrj       && (code == EQ || code == NE || code == GE || code == GEU
1177638fd1498Szrj 	  || code == LT || code == LTU)
1177738fd1498Szrj       && is_a <scalar_int_mode> (mode, &int_mode)
1177838fd1498Szrj       && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
1177938fd1498Szrj       && pow2p_hwi (const_op & GET_MODE_MASK (int_mode))
1178038fd1498Szrj       && (nonzero_bits (op0, int_mode)
1178138fd1498Szrj 	  == (unsigned HOST_WIDE_INT) (const_op & GET_MODE_MASK (int_mode))))
1178238fd1498Szrj     {
1178338fd1498Szrj       code = (code == EQ || code == GE || code == GEU ? NE : EQ);
1178438fd1498Szrj       const_op = 0;
1178538fd1498Szrj     }
1178638fd1498Szrj 
1178738fd1498Szrj   /* Similarly, if we are comparing a value known to be either -1 or
1178838fd1498Szrj      0 with -1, change it to the opposite comparison against zero.  */
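  /* For instance, if op0 is known to be either 0 or -1 (every bit a copy
     of the sign bit), (eq op0 -1) becomes (ne op0 0) and (gt op0 -1)
     becomes (eq op0 0).  */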
1178938fd1498Szrj   if (const_op == -1
1179038fd1498Szrj       && (code == EQ || code == NE || code == GT || code == LE
1179138fd1498Szrj 	  || code == GEU || code == LTU)
1179238fd1498Szrj       && is_a <scalar_int_mode> (mode, &int_mode)
1179338fd1498Szrj       && num_sign_bit_copies (op0, int_mode) == GET_MODE_PRECISION (int_mode))
1179438fd1498Szrj     {
1179538fd1498Szrj       code = (code == EQ || code == LE || code == GEU ? NE : EQ);
1179638fd1498Szrj       const_op = 0;
1179738fd1498Szrj     }
1179838fd1498Szrj 
1179938fd1498Szrj   /* Do some canonicalizations based on the comparison code.  We prefer
1180038fd1498Szrj      comparisons against zero and then prefer equality comparisons.
1180138fd1498Szrj      If we can reduce the size of a constant, we will do that too.  */
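  /* For instance, writing SIGNBIT for the mode's sign-bit constant
     (0x80000000 in SImode), the cases below turn (ltu X SIGNBIT) into
     (ge X 0), (geu X SIGNBIT) into (lt X 0), and (leu X 0) into
     (eq X 0).  */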
1180238fd1498Szrj   switch (code)
1180338fd1498Szrj     {
1180438fd1498Szrj     case LT:
1180538fd1498Szrj       /* < C is equivalent to <= (C - 1) */
1180638fd1498Szrj       if (const_op > 0)
1180738fd1498Szrj 	{
1180838fd1498Szrj 	  const_op -= 1;
1180938fd1498Szrj 	  code = LE;
1181038fd1498Szrj 	  /* ... fall through to LE case below.  */
1181138fd1498Szrj 	  gcc_fallthrough ();
1181238fd1498Szrj 	}
1181338fd1498Szrj       else
1181438fd1498Szrj 	break;
1181538fd1498Szrj 
1181638fd1498Szrj     case LE:
1181738fd1498Szrj       /* <= C is equivalent to < (C + 1); we do this for C < 0  */
1181838fd1498Szrj       if (const_op < 0)
1181938fd1498Szrj 	{
1182038fd1498Szrj 	  const_op += 1;
1182138fd1498Szrj 	  code = LT;
1182238fd1498Szrj 	}
1182338fd1498Szrj 
1182438fd1498Szrj       /* If we are doing a <= 0 comparison on a value known to have
1182538fd1498Szrj 	 a zero sign bit, we can replace this with == 0.  */
1182638fd1498Szrj       else if (const_op == 0
1182738fd1498Szrj 	       && is_a <scalar_int_mode> (mode, &int_mode)
1182838fd1498Szrj 	       && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
1182938fd1498Szrj 	       && (nonzero_bits (op0, int_mode)
1183038fd1498Szrj 		   & (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
1183138fd1498Szrj 	       == 0)
1183238fd1498Szrj 	code = EQ;
1183338fd1498Szrj       break;
1183438fd1498Szrj 
1183538fd1498Szrj     case GE:
1183638fd1498Szrj       /* >= C is equivalent to > (C - 1).  */
1183738fd1498Szrj       if (const_op > 0)
1183838fd1498Szrj 	{
1183938fd1498Szrj 	  const_op -= 1;
1184038fd1498Szrj 	  code = GT;
1184138fd1498Szrj 	  /* ... fall through to GT below.  */
1184238fd1498Szrj 	  gcc_fallthrough ();
1184338fd1498Szrj 	}
1184438fd1498Szrj       else
1184538fd1498Szrj 	break;
1184638fd1498Szrj 
1184738fd1498Szrj     case GT:
1184838fd1498Szrj       /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
1184938fd1498Szrj       if (const_op < 0)
1185038fd1498Szrj 	{
1185138fd1498Szrj 	  const_op += 1;
1185238fd1498Szrj 	  code = GE;
1185338fd1498Szrj 	}
1185438fd1498Szrj 
1185538fd1498Szrj       /* If we are doing a > 0 comparison on a value known to have
1185638fd1498Szrj 	 a zero sign bit, we can replace this with != 0.  */
1185738fd1498Szrj       else if (const_op == 0
1185838fd1498Szrj 	       && is_a <scalar_int_mode> (mode, &int_mode)
1185938fd1498Szrj 	       && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
1186038fd1498Szrj 	       && (nonzero_bits (op0, int_mode)
1186138fd1498Szrj 		   & (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
1186238fd1498Szrj 	       == 0)
1186338fd1498Szrj 	code = NE;
1186438fd1498Szrj       break;
1186538fd1498Szrj 
1186638fd1498Szrj     case LTU:
1186738fd1498Szrj       /* < C is equivalent to <= (C - 1).  */
1186838fd1498Szrj       if (const_op > 0)
1186938fd1498Szrj 	{
1187038fd1498Szrj 	  const_op -= 1;
1187138fd1498Szrj 	  code = LEU;
1187238fd1498Szrj 	  /* ... fall through ...  */
1187338fd1498Szrj 	  gcc_fallthrough ();
1187438fd1498Szrj 	}
1187538fd1498Szrj       /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
1187638fd1498Szrj       else if (is_a <scalar_int_mode> (mode, &int_mode)
1187738fd1498Szrj 	       && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
1187838fd1498Szrj 	       && ((unsigned HOST_WIDE_INT) const_op
1187938fd1498Szrj 		   == HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
1188038fd1498Szrj 	{
1188138fd1498Szrj 	  const_op = 0;
1188238fd1498Szrj 	  code = GE;
1188338fd1498Szrj 	  break;
1188438fd1498Szrj 	}
1188538fd1498Szrj       else
1188638fd1498Szrj 	break;
1188738fd1498Szrj 
1188838fd1498Szrj     case LEU:
1188938fd1498Szrj       /* unsigned <= 0 is equivalent to == 0 */
1189038fd1498Szrj       if (const_op == 0)
1189138fd1498Szrj 	code = EQ;
1189238fd1498Szrj       /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
1189338fd1498Szrj       else if (is_a <scalar_int_mode> (mode, &int_mode)
1189438fd1498Szrj 	       && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
1189538fd1498Szrj 	       && ((unsigned HOST_WIDE_INT) const_op
1189638fd1498Szrj 		   == ((HOST_WIDE_INT_1U
1189738fd1498Szrj 			<< (GET_MODE_PRECISION (int_mode) - 1)) - 1)))
1189838fd1498Szrj 	{
1189938fd1498Szrj 	  const_op = 0;
1190038fd1498Szrj 	  code = GE;
1190138fd1498Szrj 	}
1190238fd1498Szrj       break;
1190338fd1498Szrj 
1190438fd1498Szrj     case GEU:
1190538fd1498Szrj       /* >= C is equivalent to > (C - 1).  */
1190638fd1498Szrj       if (const_op > 1)
1190738fd1498Szrj 	{
1190838fd1498Szrj 	  const_op -= 1;
1190938fd1498Szrj 	  code = GTU;
1191038fd1498Szrj 	  /* ... fall through ...  */
1191138fd1498Szrj 	  gcc_fallthrough ();
1191238fd1498Szrj 	}
1191338fd1498Szrj 
1191438fd1498Szrj       /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
1191538fd1498Szrj       else if (is_a <scalar_int_mode> (mode, &int_mode)
1191638fd1498Szrj 	       && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
1191738fd1498Szrj 	       && ((unsigned HOST_WIDE_INT) const_op
1191838fd1498Szrj 		   == HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
1191938fd1498Szrj 	{
1192038fd1498Szrj 	  const_op = 0;
1192138fd1498Szrj 	  code = LT;
1192238fd1498Szrj 	  break;
1192338fd1498Szrj 	}
1192438fd1498Szrj       else
1192538fd1498Szrj 	break;
1192638fd1498Szrj 
1192738fd1498Szrj     case GTU:
1192838fd1498Szrj       /* unsigned > 0 is equivalent to != 0 */
1192938fd1498Szrj       if (const_op == 0)
1193038fd1498Szrj 	code = NE;
1193138fd1498Szrj       /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
1193238fd1498Szrj       else if (is_a <scalar_int_mode> (mode, &int_mode)
1193338fd1498Szrj 	       && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
1193438fd1498Szrj 	       && ((unsigned HOST_WIDE_INT) const_op
1193538fd1498Szrj 		   == (HOST_WIDE_INT_1U
1193638fd1498Szrj 		       << (GET_MODE_PRECISION (int_mode) - 1)) - 1))
1193738fd1498Szrj 	{
1193838fd1498Szrj 	  const_op = 0;
1193938fd1498Szrj 	  code = LT;
1194038fd1498Szrj 	}
1194138fd1498Szrj       break;
1194238fd1498Szrj 
1194338fd1498Szrj     default:
1194438fd1498Szrj       break;
1194538fd1498Szrj     }
1194638fd1498Szrj 
1194738fd1498Szrj   *pop1 = GEN_INT (const_op);
1194838fd1498Szrj   return code;
1194938fd1498Szrj }
1195038fd1498Szrj 
1195138fd1498Szrj /* Simplify a comparison between *POP0 and *POP1 where CODE is the
1195238fd1498Szrj    comparison code that will be tested.
1195338fd1498Szrj 
1195438fd1498Szrj    The result is a possibly different comparison code to use.  *POP0 and
1195538fd1498Szrj    *POP1 may be updated.
1195638fd1498Szrj 
1195738fd1498Szrj    It is possible that we might detect that a comparison is either always
1195838fd1498Szrj    true or always false.  However, we do not perform general constant
1195938fd1498Szrj    folding in combine, so this knowledge isn't useful.  Such tautologies
1196038fd1498Szrj    should have been detected earlier.  Hence we ignore all such cases.  */
1196138fd1498Szrj 
1196238fd1498Szrj static enum rtx_code
1196338fd1498Szrj simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
1196438fd1498Szrj {
1196538fd1498Szrj   rtx op0 = *pop0;
1196638fd1498Szrj   rtx op1 = *pop1;
1196738fd1498Szrj   rtx tem, tem1;
1196838fd1498Szrj   int i;
1196938fd1498Szrj   scalar_int_mode mode, inner_mode, tmode;
1197038fd1498Szrj   opt_scalar_int_mode tmode_iter;
1197138fd1498Szrj 
1197238fd1498Szrj   /* Try a few ways of applying the same transformation to both operands.  */
1197338fd1498Szrj   while (1)
1197438fd1498Szrj     {
1197538fd1498Szrj       /* The test below this one won't handle SIGN_EXTENDs on machines
1197638fd1498Szrj 	 without WORD_REGISTER_OPERATIONS, so check specially.  */
1197738fd1498Szrj       if (!WORD_REGISTER_OPERATIONS
1197838fd1498Szrj 	  && code != GTU && code != GEU && code != LTU && code != LEU
1197938fd1498Szrj 	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
1198038fd1498Szrj 	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
1198138fd1498Szrj 	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
1198238fd1498Szrj 	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
1198338fd1498Szrj 	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
1198438fd1498Szrj 	  && is_a <scalar_int_mode> (GET_MODE (op0), &mode)
1198538fd1498Szrj 	  && (is_a <scalar_int_mode>
1198638fd1498Szrj 	      (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))), &inner_mode))
1198738fd1498Szrj 	  && inner_mode == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))
1198838fd1498Szrj 	  && CONST_INT_P (XEXP (op0, 1))
1198938fd1498Szrj 	  && XEXP (op0, 1) == XEXP (op1, 1)
1199038fd1498Szrj 	  && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
1199138fd1498Szrj 	  && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
1199238fd1498Szrj 	  && (INTVAL (XEXP (op0, 1))
1199338fd1498Szrj 	      == (GET_MODE_PRECISION (mode)
1199438fd1498Szrj 		  - GET_MODE_PRECISION (inner_mode))))
1199538fd1498Szrj 	{
1199638fd1498Szrj 	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
1199738fd1498Szrj 	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
1199838fd1498Szrj 	}
1199938fd1498Szrj 
1200038fd1498Szrj       /* If both operands are the same constant shift, see if we can ignore the
1200138fd1498Szrj 	 shift.  We can if the shift is a rotate or if the bits shifted out of
1200238fd1498Szrj 	 this shift are known to be zero for both inputs and if the type of
1200338fd1498Szrj 	 comparison is compatible with the shift.  */
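      /* For instance, if the low two bits of both A and B are known to be
	 zero, (eq (lshiftrt A 2) (lshiftrt B 2)) compares the same values
	 as (eq A B), so the shifts can be stripped.  */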
1200438fd1498Szrj       if (GET_CODE (op0) == GET_CODE (op1)
1200538fd1498Szrj 	  && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
1200638fd1498Szrj 	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
1200738fd1498Szrj 	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
1200838fd1498Szrj 		  && (code != GT && code != LT && code != GE && code != LE))
1200938fd1498Szrj 	      || (GET_CODE (op0) == ASHIFTRT
1201038fd1498Szrj 		  && (code != GTU && code != LTU
1201138fd1498Szrj 		      && code != GEU && code != LEU)))
1201238fd1498Szrj 	  && CONST_INT_P (XEXP (op0, 1))
1201338fd1498Szrj 	  && INTVAL (XEXP (op0, 1)) >= 0
1201438fd1498Szrj 	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
1201538fd1498Szrj 	  && XEXP (op0, 1) == XEXP (op1, 1))
1201638fd1498Szrj 	{
1201738fd1498Szrj 	  machine_mode mode = GET_MODE (op0);
1201838fd1498Szrj 	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
1201938fd1498Szrj 	  int shift_count = INTVAL (XEXP (op0, 1));
1202038fd1498Szrj 
1202138fd1498Szrj 	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
1202238fd1498Szrj 	    mask &= (mask >> shift_count) << shift_count;
1202338fd1498Szrj 	  else if (GET_CODE (op0) == ASHIFT)
1202438fd1498Szrj 	    mask = (mask & (mask << shift_count)) >> shift_count;
1202538fd1498Szrj 
1202638fd1498Szrj 	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
1202738fd1498Szrj 	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
1202838fd1498Szrj 	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
1202938fd1498Szrj 	  else
1203038fd1498Szrj 	    break;
1203138fd1498Szrj 	}
1203238fd1498Szrj 
1203338fd1498Szrj       /* If both operands are AND's of a paradoxical SUBREG by constant, the
1203438fd1498Szrj 	 SUBREGs are of the same mode, and, in both cases, the AND would
1203538fd1498Szrj 	 be redundant if the comparison was done in the narrower mode,
1203638fd1498Szrj 	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
1203738fd1498Szrj 	 and the operand's possibly nonzero bits are 0xffffff01; in that case
1203838fd1498Szrj 	 if we only care about QImode, we don't need the AND).  This case
1203938fd1498Szrj 	 occurs if the output mode of an scc insn is not SImode and
1204038fd1498Szrj 	 STORE_FLAG_VALUE == 1 (e.g., the 386).
1204138fd1498Szrj 
1204238fd1498Szrj 	 Similarly, check for a case where the AND's are ZERO_EXTEND
1204338fd1498Szrj 	 operations from some narrower mode even though a SUBREG is not
1204438fd1498Szrj 	 present.  */
1204538fd1498Szrj 
1204638fd1498Szrj       else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
1204738fd1498Szrj 	       && CONST_INT_P (XEXP (op0, 1))
1204838fd1498Szrj 	       && CONST_INT_P (XEXP (op1, 1)))
1204938fd1498Szrj 	{
1205038fd1498Szrj 	  rtx inner_op0 = XEXP (op0, 0);
1205138fd1498Szrj 	  rtx inner_op1 = XEXP (op1, 0);
1205238fd1498Szrj 	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
1205338fd1498Szrj 	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
1205438fd1498Szrj 	  int changed = 0;
1205538fd1498Szrj 
1205638fd1498Szrj 	  if (paradoxical_subreg_p (inner_op0)
1205738fd1498Szrj 	      && GET_CODE (inner_op1) == SUBREG
1205838fd1498Szrj 	      && HWI_COMPUTABLE_MODE_P (GET_MODE (SUBREG_REG (inner_op0)))
1205938fd1498Szrj 	      && (GET_MODE (SUBREG_REG (inner_op0))
1206038fd1498Szrj 		  == GET_MODE (SUBREG_REG (inner_op1)))
1206138fd1498Szrj 	      && ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
1206238fd1498Szrj 					GET_MODE (SUBREG_REG (inner_op0)))) == 0
1206338fd1498Szrj 	      && ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
1206438fd1498Szrj 					GET_MODE (SUBREG_REG (inner_op1)))) == 0)
1206538fd1498Szrj 	    {
1206638fd1498Szrj 	      op0 = SUBREG_REG (inner_op0);
1206738fd1498Szrj 	      op1 = SUBREG_REG (inner_op1);
1206838fd1498Szrj 
1206938fd1498Szrj 	      /* The resulting comparison is always unsigned since we masked
1207038fd1498Szrj 		 off the original sign bit.  */
1207138fd1498Szrj 	      code = unsigned_condition (code);
1207238fd1498Szrj 
1207338fd1498Szrj 	      changed = 1;
1207438fd1498Szrj 	    }
1207538fd1498Szrj 
1207638fd1498Szrj 	  else if (c0 == c1)
1207738fd1498Szrj 	    FOR_EACH_MODE_UNTIL (tmode,
1207838fd1498Szrj 				 as_a <scalar_int_mode> (GET_MODE (op0)))
1207938fd1498Szrj 	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
1208038fd1498Szrj 		{
1208138fd1498Szrj 		  op0 = gen_lowpart_or_truncate (tmode, inner_op0);
1208238fd1498Szrj 		  op1 = gen_lowpart_or_truncate (tmode, inner_op1);
1208338fd1498Szrj 		  code = unsigned_condition (code);
1208438fd1498Szrj 		  changed = 1;
1208538fd1498Szrj 		  break;
1208638fd1498Szrj 		}
1208738fd1498Szrj 
1208838fd1498Szrj 	  if (! changed)
1208938fd1498Szrj 	    break;
1209038fd1498Szrj 	}
1209138fd1498Szrj 
1209238fd1498Szrj       /* If both operands are NOT, we can strip off the outer operation
1209338fd1498Szrj 	 and adjust the comparison code for swapped operands; similarly for
1209438fd1498Szrj 	 NEG, except that this must be an equality comparison.  */
1209538fd1498Szrj       else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
1209638fd1498Szrj 	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
1209738fd1498Szrj 		   && (code == EQ || code == NE)))
1209838fd1498Szrj 	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
1209938fd1498Szrj 
1210038fd1498Szrj       else
1210138fd1498Szrj 	break;
1210238fd1498Szrj     }
1210338fd1498Szrj 
1210438fd1498Szrj   /* If the first operand is a constant, swap the operands and adjust the
1210538fd1498Szrj      comparison code appropriately, but don't do this if the second operand
1210638fd1498Szrj      is already a constant integer.  */
1210738fd1498Szrj   if (swap_commutative_operands_p (op0, op1))
1210838fd1498Szrj     {
1210938fd1498Szrj       std::swap (op0, op1);
1211038fd1498Szrj       code = swap_condition (code);
1211138fd1498Szrj     }
1211238fd1498Szrj 
1211338fd1498Szrj   /* We now enter a loop during which we will try to simplify the comparison.
1211438fd1498Szrj      For the most part, we are only concerned with comparisons with zero,
1211538fd1498Szrj      but some things may really be comparisons with zero but not start
1211638fd1498Szrj      out looking that way.  */
1211738fd1498Szrj 
1211838fd1498Szrj   while (CONST_INT_P (op1))
1211938fd1498Szrj     {
1212038fd1498Szrj       machine_mode raw_mode = GET_MODE (op0);
1212138fd1498Szrj       scalar_int_mode int_mode;
1212238fd1498Szrj       int equality_comparison_p;
1212338fd1498Szrj       int sign_bit_comparison_p;
1212438fd1498Szrj       int unsigned_comparison_p;
1212538fd1498Szrj       HOST_WIDE_INT const_op;
1212638fd1498Szrj 
1212738fd1498Szrj       /* We only want to handle integral modes.  This catches VOIDmode,
1212838fd1498Szrj 	 CCmode, and the floating-point modes.  An exception is that we
1212938fd1498Szrj 	 can handle VOIDmode if OP0 is a COMPARE or a comparison
1213038fd1498Szrj 	 operation.  */
1213138fd1498Szrj 
1213238fd1498Szrj       if (GET_MODE_CLASS (raw_mode) != MODE_INT
1213338fd1498Szrj 	  && ! (raw_mode == VOIDmode
1213438fd1498Szrj 		&& (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
1213538fd1498Szrj 	break;
1213638fd1498Szrj 
1213738fd1498Szrj       /* Try to simplify the compare to constant, possibly changing the
1213838fd1498Szrj 	 comparison op, and/or changing op1 to zero.  */
1213938fd1498Szrj       code = simplify_compare_const (code, raw_mode, op0, &op1);
1214038fd1498Szrj       const_op = INTVAL (op1);
1214138fd1498Szrj 
1214238fd1498Szrj       /* Compute some predicates to simplify code below.  */
1214338fd1498Szrj 
1214438fd1498Szrj       equality_comparison_p = (code == EQ || code == NE);
1214538fd1498Szrj       sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
1214638fd1498Szrj       unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
1214738fd1498Szrj 			       || code == GEU);
1214838fd1498Szrj 
1214938fd1498Szrj       /* If this is a sign bit comparison and we can do arithmetic in
1215038fd1498Szrj 	 MODE, say that we will only be needing the sign bit of OP0.  */
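      /* For instance, for (lt (ior X 1) 0) this lets force_to_mode drop
	 the IOR, since bit 0 cannot influence the sign bit.  */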
1215138fd1498Szrj       if (sign_bit_comparison_p
1215238fd1498Szrj 	  && is_a <scalar_int_mode> (raw_mode, &int_mode)
1215338fd1498Szrj 	  && HWI_COMPUTABLE_MODE_P (int_mode))
1215438fd1498Szrj 	op0 = force_to_mode (op0, int_mode,
1215538fd1498Szrj 			     HOST_WIDE_INT_1U
1215638fd1498Szrj 			     << (GET_MODE_PRECISION (int_mode) - 1),
1215738fd1498Szrj 			     0);
1215838fd1498Szrj 
1215938fd1498Szrj       if (COMPARISON_P (op0))
1216038fd1498Szrj 	{
1216138fd1498Szrj 	  /* We can't do anything if OP0 is a condition code value, rather
1216238fd1498Szrj 	     than an actual data value.  */
1216338fd1498Szrj 	  if (const_op != 0
1216438fd1498Szrj 	      || CC0_P (XEXP (op0, 0))
1216538fd1498Szrj 	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
1216638fd1498Szrj 	    break;
1216738fd1498Szrj 
1216838fd1498Szrj 	  /* Get the two operands being compared.  */
1216938fd1498Szrj 	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
1217038fd1498Szrj 	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
1217138fd1498Szrj 	  else
1217238fd1498Szrj 	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
1217338fd1498Szrj 
1217438fd1498Szrj 	  /* Check for the cases where we simply want the result of the
1217538fd1498Szrj 	     earlier test or the opposite of that result.  */
1217638fd1498Szrj 	  if (code == NE || code == EQ
1217738fd1498Szrj 	      || (val_signbit_known_set_p (raw_mode, STORE_FLAG_VALUE)
1217838fd1498Szrj 		  && (code == LT || code == GE)))
1217938fd1498Szrj 	    {
1218038fd1498Szrj 	      enum rtx_code new_code;
1218138fd1498Szrj 	      if (code == LT || code == NE)
1218238fd1498Szrj 		new_code = GET_CODE (op0);
1218338fd1498Szrj 	      else
1218438fd1498Szrj 		new_code = reversed_comparison_code (op0, NULL);
1218538fd1498Szrj 
1218638fd1498Szrj 	      if (new_code != UNKNOWN)
1218738fd1498Szrj 		{
1218838fd1498Szrj 		  code = new_code;
1218938fd1498Szrj 		  op0 = tem;
1219038fd1498Szrj 		  op1 = tem1;
1219138fd1498Szrj 		  continue;
1219238fd1498Szrj 		}
1219338fd1498Szrj 	    }
1219438fd1498Szrj 	  break;
1219538fd1498Szrj 	}
1219638fd1498Szrj 
1219738fd1498Szrj       if (raw_mode == VOIDmode)
1219838fd1498Szrj 	break;
1219938fd1498Szrj       scalar_int_mode mode = as_a <scalar_int_mode> (raw_mode);
1220038fd1498Szrj 
1220138fd1498Szrj       /* Now try cases based on the opcode of OP0.  If none of the cases
1220238fd1498Szrj 	 does a "continue", we exit this loop immediately after the
1220338fd1498Szrj 	 switch.  */
1220438fd1498Szrj 
1220538fd1498Szrj       unsigned int mode_width = GET_MODE_PRECISION (mode);
1220638fd1498Szrj       unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
1220738fd1498Szrj       switch (GET_CODE (op0))
1220838fd1498Szrj 	{
1220938fd1498Szrj 	case ZERO_EXTRACT:
1221038fd1498Szrj 	  /* If we are extracting a single bit from a variable position in
1221138fd1498Szrj 	     a constant that has only a single bit set and are comparing it
1221238fd1498Szrj 	     with zero, we can convert this into an equality comparison
1221338fd1498Szrj 	     between the position and the location of the single bit.  */
1221438fd1498Szrj 	  /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
1221538fd1498Szrj 	     have already reduced the shift count modulo the word size.  */
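	  /* For instance, assuming BITS_BIG_ENDIAN is clear,
	     (eq (zero_extract (const_int 8) 1 POS) 0) becomes (ne POS 3):
	     the extracted bit is nonzero exactly when POS selects bit 3,
	     the only bit set in the constant.  */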
1221638fd1498Szrj 	  if (!SHIFT_COUNT_TRUNCATED
1221738fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 0))
1221838fd1498Szrj 	      && XEXP (op0, 1) == const1_rtx
1221938fd1498Szrj 	      && equality_comparison_p && const_op == 0
1222038fd1498Szrj 	      && (i = exact_log2 (UINTVAL (XEXP (op0, 0)))) >= 0)
1222138fd1498Szrj 	    {
1222238fd1498Szrj 	      if (BITS_BIG_ENDIAN)
1222338fd1498Szrj 		i = BITS_PER_WORD - 1 - i;
1222438fd1498Szrj 
1222538fd1498Szrj 	      op0 = XEXP (op0, 2);
1222638fd1498Szrj 	      op1 = GEN_INT (i);
1222738fd1498Szrj 	      const_op = i;
1222838fd1498Szrj 
1222938fd1498Szrj 	      /* Result is nonzero iff shift count is equal to I.  */
1223038fd1498Szrj 	      code = reverse_condition (code);
1223138fd1498Szrj 	      continue;
1223238fd1498Szrj 	    }
1223338fd1498Szrj 
1223438fd1498Szrj 	  /* fall through */
1223538fd1498Szrj 
1223638fd1498Szrj 	case SIGN_EXTRACT:
1223738fd1498Szrj 	  tem = expand_compound_operation (op0);
1223838fd1498Szrj 	  if (tem != op0)
1223938fd1498Szrj 	    {
1224038fd1498Szrj 	      op0 = tem;
1224138fd1498Szrj 	      continue;
1224238fd1498Szrj 	    }
1224338fd1498Szrj 	  break;
1224438fd1498Szrj 
1224538fd1498Szrj 	case NOT:
1224638fd1498Szrj 	  /* If testing for equality, we can take the NOT of the constant.  */
1224738fd1498Szrj 	  if (equality_comparison_p
1224838fd1498Szrj 	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
1224938fd1498Szrj 	    {
1225038fd1498Szrj 	      op0 = XEXP (op0, 0);
1225138fd1498Szrj 	      op1 = tem;
1225238fd1498Szrj 	      continue;
1225338fd1498Szrj 	    }
1225438fd1498Szrj 
1225538fd1498Szrj 	  /* If just looking at the sign bit, reverse the sense of the
1225638fd1498Szrj 	     comparison.  */
1225738fd1498Szrj 	  if (sign_bit_comparison_p)
1225838fd1498Szrj 	    {
1225938fd1498Szrj 	      op0 = XEXP (op0, 0);
1226038fd1498Szrj 	      code = (code == GE ? LT : GE);
1226138fd1498Szrj 	      continue;
1226238fd1498Szrj 	    }
1226338fd1498Szrj 	  break;
1226438fd1498Szrj 
1226538fd1498Szrj 	case NEG:
1226638fd1498Szrj 	  /* If testing for equality, we can take the NEG of the constant.  */
1226738fd1498Szrj 	  if (equality_comparison_p
1226838fd1498Szrj 	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
1226938fd1498Szrj 	    {
1227038fd1498Szrj 	      op0 = XEXP (op0, 0);
1227138fd1498Szrj 	      op1 = tem;
1227238fd1498Szrj 	      continue;
1227338fd1498Szrj 	    }
1227438fd1498Szrj 
1227538fd1498Szrj 	  /* The remaining cases only apply to comparisons with zero.  */
1227638fd1498Szrj 	  if (const_op != 0)
1227738fd1498Szrj 	    break;
1227838fd1498Szrj 
1227938fd1498Szrj 	  /* When X is ABS or is known positive,
1228038fd1498Szrj 	     (neg X) is < 0 if and only if X != 0.  */
1228138fd1498Szrj 
1228238fd1498Szrj 	  if (sign_bit_comparison_p
1228338fd1498Szrj 	      && (GET_CODE (XEXP (op0, 0)) == ABS
1228438fd1498Szrj 		  || (mode_width <= HOST_BITS_PER_WIDE_INT
1228538fd1498Szrj 		      && (nonzero_bits (XEXP (op0, 0), mode)
1228638fd1498Szrj 			  & (HOST_WIDE_INT_1U << (mode_width - 1)))
1228738fd1498Szrj 			 == 0)))
1228838fd1498Szrj 	    {
1228938fd1498Szrj 	      op0 = XEXP (op0, 0);
1229038fd1498Szrj 	      code = (code == LT ? NE : EQ);
1229138fd1498Szrj 	      continue;
1229238fd1498Szrj 	    }
1229338fd1498Szrj 
1229438fd1498Szrj 	  /* If we have NEG of something whose two high-order bits are the
1229538fd1498Szrj 	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
1229638fd1498Szrj 	  if (num_sign_bit_copies (op0, mode) >= 2)
1229738fd1498Szrj 	    {
1229838fd1498Szrj 	      op0 = XEXP (op0, 0);
1229938fd1498Szrj 	      code = swap_condition (code);
1230038fd1498Szrj 	      continue;
1230138fd1498Szrj 	    }
1230238fd1498Szrj 	  break;
1230338fd1498Szrj 
1230438fd1498Szrj 	case ROTATE:
1230538fd1498Szrj 	  /* If we are testing equality and our count is a constant, we
1230638fd1498Szrj 	     can perform the inverse operation on our RHS.  */
1230738fd1498Szrj 	  if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
1230838fd1498Szrj 	      && (tem = simplify_binary_operation (ROTATERT, mode,
1230938fd1498Szrj 						   op1, XEXP (op0, 1))) != 0)
1231038fd1498Szrj 	    {
1231138fd1498Szrj 	      op0 = XEXP (op0, 0);
1231238fd1498Szrj 	      op1 = tem;
1231338fd1498Szrj 	      continue;
1231438fd1498Szrj 	    }
1231538fd1498Szrj 
1231638fd1498Szrj 	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
1231738fd1498Szrj 	     a particular bit.  Convert it to an AND of a constant of that
1231838fd1498Szrj 	     bit.  This will be converted into a ZERO_EXTRACT.  */
1231938fd1498Szrj 	  if (const_op == 0 && sign_bit_comparison_p
1232038fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1))
1232138fd1498Szrj 	      && mode_width <= HOST_BITS_PER_WIDE_INT)
1232238fd1498Szrj 	    {
1232338fd1498Szrj 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
1232438fd1498Szrj 					    (HOST_WIDE_INT_1U
1232538fd1498Szrj 					     << (mode_width - 1
1232638fd1498Szrj 						 - INTVAL (XEXP (op0, 1)))));
1232738fd1498Szrj 	      code = (code == LT ? NE : EQ);
1232838fd1498Szrj 	      continue;
1232938fd1498Szrj 	    }
1233038fd1498Szrj 
1233138fd1498Szrj 	  /* Fall through.  */
1233238fd1498Szrj 
1233338fd1498Szrj 	case ABS:
1233438fd1498Szrj 	  /* ABS is ignorable inside an equality comparison with zero.  */
1233538fd1498Szrj 	  if (const_op == 0 && equality_comparison_p)
1233638fd1498Szrj 	    {
1233738fd1498Szrj 	      op0 = XEXP (op0, 0);
1233838fd1498Szrj 	      continue;
1233938fd1498Szrj 	    }
1234038fd1498Szrj 	  break;
1234138fd1498Szrj 
1234238fd1498Szrj 	case SIGN_EXTEND:
1234338fd1498Szrj 	  /* Can simplify (compare (zero/sign_extend FOO) CONST) to
1234438fd1498Szrj 	     (compare FOO CONST) if CONST fits in FOO's mode and we
1234538fd1498Szrj 	     are either testing inequality or have an unsigned
1234638fd1498Szrj 	     comparison with ZERO_EXTEND or a signed comparison with
1234738fd1498Szrj 	     SIGN_EXTEND.  But don't do it if we don't have a compare
1234838fd1498Szrj 	     insn of the given mode, since we'd have to revert it
1234938fd1498Szrj 	     later on, and then we wouldn't know whether to sign- or
1235038fd1498Szrj 	     zero-extend.  */
1235138fd1498Szrj 	  if (is_int_mode (GET_MODE (XEXP (op0, 0)), &mode)
1235238fd1498Szrj 	      && ! unsigned_comparison_p
1235338fd1498Szrj 	      && HWI_COMPUTABLE_MODE_P (mode)
1235438fd1498Szrj 	      && trunc_int_for_mode (const_op, mode) == const_op
1235538fd1498Szrj 	      && have_insn_for (COMPARE, mode))
1235638fd1498Szrj 	    {
1235738fd1498Szrj 	      op0 = XEXP (op0, 0);
1235838fd1498Szrj 	      continue;
1235938fd1498Szrj 	    }
1236038fd1498Szrj 	  break;
1236138fd1498Szrj 
1236238fd1498Szrj 	case SUBREG:
1236338fd1498Szrj 	  /* Check for the case where we are comparing A - C1 with C2, that is
1236438fd1498Szrj 
1236538fd1498Szrj 	       (subreg:MODE (plus (A) (-C1))) op (C2)
1236638fd1498Szrj 
1236738fd1498Szrj 	     with C1 a constant, and try to lift the SUBREG, i.e. to do the
1236838fd1498Szrj 	     comparison in the wider mode.  One of the following two conditions
1236938fd1498Szrj 	     must be true in order for this to be valid:
1237038fd1498Szrj 
1237138fd1498Szrj 	       1. The mode extension results in the same bit pattern being added
1237238fd1498Szrj 		  on both sides and the comparison is equality or unsigned.  As
1237338fd1498Szrj 		  C2 has been truncated to fit in MODE, the pattern can only be
1237438fd1498Szrj 		  all 0s or all 1s.
1237538fd1498Szrj 
1237638fd1498Szrj 	       2. The mode extension results in the sign bit being copied on
1237738fd1498Szrj 		  each side.
1237838fd1498Szrj 
1237938fd1498Szrj 	     The difficulty here is that we have predicates for A but not for
1238038fd1498Szrj 	     (A - C1), so we need to check that C1 is within proper bounds so
1238138fd1498Szrj 	     as to perturb A as little as possible.  */
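	  /* For instance (illustrative, with MODE == QImode and
	     C1 == 1), comparing (subreg:QI (plus:SI A (const_int -1)) 0)
	     with (const_int 5) for equality can be lifted to comparing
	     (plus:SI A (const_int -1)) with (const_int 5), provided the
	     nonzero bits of A fit in QImode (condition 1 above).  */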
1238238fd1498Szrj 
1238338fd1498Szrj 	  if (mode_width <= HOST_BITS_PER_WIDE_INT
1238438fd1498Szrj 	      && subreg_lowpart_p (op0)
1238538fd1498Szrj 	      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
1238638fd1498Szrj 					 &inner_mode)
1238738fd1498Szrj 	      && GET_MODE_PRECISION (inner_mode) > mode_width
1238838fd1498Szrj 	      && GET_CODE (SUBREG_REG (op0)) == PLUS
1238938fd1498Szrj 	      && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
1239038fd1498Szrj 	    {
1239138fd1498Szrj 	      rtx a = XEXP (SUBREG_REG (op0), 0);
1239238fd1498Szrj 	      HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
1239338fd1498Szrj 
1239438fd1498Szrj 	      if ((c1 > 0
1239538fd1498Szrj 		   && (unsigned HOST_WIDE_INT) c1
1239638fd1498Szrj 		       < HOST_WIDE_INT_1U << (mode_width - 1)
1239738fd1498Szrj 		   && (equality_comparison_p || unsigned_comparison_p)
1239838fd1498Szrj 		   /* (A - C1) zero-extends if it is positive and sign-extends
1239938fd1498Szrj 		      if it is negative, C2 both zero- and sign-extends.  */
1240038fd1498Szrj 		   && (((nonzero_bits (a, inner_mode)
1240138fd1498Szrj 			 & ~GET_MODE_MASK (mode)) == 0
1240238fd1498Szrj 			&& const_op >= 0)
1240338fd1498Szrj 		       /* (A - C1) sign-extends if it is positive and 1-extends
1240438fd1498Szrj 			  if it is negative, C2 both sign- and 1-extends.  */
1240538fd1498Szrj 		       || (num_sign_bit_copies (a, inner_mode)
1240638fd1498Szrj 			   > (unsigned int) (GET_MODE_PRECISION (inner_mode)
1240738fd1498Szrj 					     - mode_width)
1240838fd1498Szrj 			   && const_op < 0)))
1240938fd1498Szrj 		  || ((unsigned HOST_WIDE_INT) c1
1241038fd1498Szrj 		       < HOST_WIDE_INT_1U << (mode_width - 2)
1241138fd1498Szrj 		      /* (A - C1) always sign-extends, like C2.  */
1241238fd1498Szrj 		      && num_sign_bit_copies (a, inner_mode)
1241338fd1498Szrj 			 > (unsigned int) (GET_MODE_PRECISION (inner_mode)
1241438fd1498Szrj 					   - (mode_width - 1))))
1241538fd1498Szrj 		{
1241638fd1498Szrj 		  op0 = SUBREG_REG (op0);
1241738fd1498Szrj 		  continue;
1241838fd1498Szrj 		}
1241938fd1498Szrj 	    }
1242038fd1498Szrj 
1242138fd1498Szrj 	  /* If the inner mode is narrower and we are extracting the low part,
1242238fd1498Szrj 	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
1242338fd1498Szrj 	  if (paradoxical_subreg_p (op0))
1242438fd1498Szrj 	    ;
1242538fd1498Szrj 	  else if (subreg_lowpart_p (op0)
1242638fd1498Szrj 		   && GET_MODE_CLASS (mode) == MODE_INT
1242738fd1498Szrj 		   && is_int_mode (GET_MODE (SUBREG_REG (op0)), &inner_mode)
1242838fd1498Szrj 		   && (code == NE || code == EQ)
1242938fd1498Szrj 		   && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
1243038fd1498Szrj 		   && !paradoxical_subreg_p (op0)
1243138fd1498Szrj 		   && (nonzero_bits (SUBREG_REG (op0), inner_mode)
1243238fd1498Szrj 		       & ~GET_MODE_MASK (mode)) == 0)
1243338fd1498Szrj 	    {
1243438fd1498Szrj 	      /* Remove outer subregs that don't do anything.  */
1243538fd1498Szrj 	      tem = gen_lowpart (inner_mode, op1);
1243638fd1498Szrj 
1243738fd1498Szrj 	      if ((nonzero_bits (tem, inner_mode)
1243838fd1498Szrj 		   & ~GET_MODE_MASK (mode)) == 0)
1243938fd1498Szrj 		{
1244038fd1498Szrj 		  op0 = SUBREG_REG (op0);
1244138fd1498Szrj 		  op1 = tem;
1244238fd1498Szrj 		  continue;
1244338fd1498Szrj 		}
1244438fd1498Szrj 	      break;
1244538fd1498Szrj 	    }
1244638fd1498Szrj 	  else
1244738fd1498Szrj 	    break;
1244838fd1498Szrj 
1244938fd1498Szrj 	  /* FALLTHROUGH */
1245038fd1498Szrj 
1245138fd1498Szrj 	case ZERO_EXTEND:
1245238fd1498Szrj 	  if (is_int_mode (GET_MODE (XEXP (op0, 0)), &mode)
1245338fd1498Szrj 	      && (unsigned_comparison_p || equality_comparison_p)
1245438fd1498Szrj 	      && HWI_COMPUTABLE_MODE_P (mode)
1245538fd1498Szrj 	      && (unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (mode)
1245638fd1498Szrj 	      && const_op >= 0
1245738fd1498Szrj 	      && have_insn_for (COMPARE, mode))
1245838fd1498Szrj 	    {
1245938fd1498Szrj 	      op0 = XEXP (op0, 0);
1246038fd1498Szrj 	      continue;
1246138fd1498Szrj 	    }
1246238fd1498Szrj 	  break;
1246338fd1498Szrj 
1246438fd1498Szrj 	case PLUS:
1246538fd1498Szrj 	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
1246638fd1498Szrj 	     this for equality comparisons due to pathological cases involving
1246738fd1498Szrj 	     overflows.  */
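	  /* E.g. (eq (plus X (const_int 3)) (const_int 10)) becomes
	     (eq X (const_int 7)).  An ordered comparison would not be
	     safe: in QImode, X == 126 gives X + 3 == -127 < 10 even
	     though X < 7 is false.  */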
1246838fd1498Szrj 	  if (equality_comparison_p
1246938fd1498Szrj 	      && (tem = simplify_binary_operation (MINUS, mode,
1247038fd1498Szrj 						   op1, XEXP (op0, 1))) != 0)
1247138fd1498Szrj 	    {
1247238fd1498Szrj 	      op0 = XEXP (op0, 0);
1247338fd1498Szrj 	      op1 = tem;
1247438fd1498Szrj 	      continue;
1247538fd1498Szrj 	    }
1247638fd1498Szrj 
1247738fd1498Szrj 	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
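	  /* E.g. X == 0 gives (abs X) - 1 == -1 (sign bit set), while
	     any nonzero X gives a non-negative result, so
	     (lt (plus (abs X) (const_int -1)) (const_int 0)) becomes
	     (eq X (const_int 0)).  */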
1247838fd1498Szrj 	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
1247938fd1498Szrj 	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
1248038fd1498Szrj 	    {
1248138fd1498Szrj 	      op0 = XEXP (XEXP (op0, 0), 0);
1248238fd1498Szrj 	      code = (code == LT ? EQ : NE);
1248338fd1498Szrj 	      continue;
1248438fd1498Szrj 	    }
1248538fd1498Szrj 	  break;
1248638fd1498Szrj 
1248738fd1498Szrj 	case MINUS:
1248838fd1498Szrj 	  /* We used to optimize signed comparisons against zero, but that
1248938fd1498Szrj 	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
1249038fd1498Szrj 	     arrive here as equality comparisons, while (GEU, LTU) are
1249138fd1498Szrj 	     optimized away.  No need to special-case them.  */
1249238fd1498Szrj 
1249338fd1498Szrj 	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
1249438fd1498Szrj 	     (eq B (minus A C)), whichever simplifies.  We can only do
1249538fd1498Szrj 	     this for equality comparisons due to pathological cases involving
1249638fd1498Szrj 	     overflows.  */
1249738fd1498Szrj 	  if (equality_comparison_p
1249838fd1498Szrj 	      && (tem = simplify_binary_operation (PLUS, mode,
1249938fd1498Szrj 						   XEXP (op0, 1), op1)) != 0)
1250038fd1498Szrj 	    {
1250138fd1498Szrj 	      op0 = XEXP (op0, 0);
1250238fd1498Szrj 	      op1 = tem;
1250338fd1498Szrj 	      continue;
1250438fd1498Szrj 	    }
1250538fd1498Szrj 
1250638fd1498Szrj 	  if (equality_comparison_p
1250738fd1498Szrj 	      && (tem = simplify_binary_operation (MINUS, mode,
1250838fd1498Szrj 						   XEXP (op0, 0), op1)) != 0)
1250938fd1498Szrj 	    {
1251038fd1498Szrj 	      op0 = XEXP (op0, 1);
1251138fd1498Szrj 	      op1 = tem;
1251238fd1498Szrj 	      continue;
1251338fd1498Szrj 	    }
1251438fd1498Szrj 
1251538fd1498Szrj 	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
1251638fd1498Szrj 	     of bits in X minus 1, is one iff X > 0.  */
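	  /* E.g. in SImode with C == 31: for X == 5,
	     (ashiftrt X 31) == 0 and 0 - 5 == -5 (sign bit set); for
	     X == -5, -1 - (-5) == 4 (sign bit clear).  So
	     (lt (minus (ashiftrt X 31) X) 0) becomes (gt X 0).  */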
1251738fd1498Szrj 	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
1251838fd1498Szrj 	      && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
1251938fd1498Szrj 	      && UINTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
1252038fd1498Szrj 	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
1252138fd1498Szrj 	    {
1252238fd1498Szrj 	      op0 = XEXP (op0, 1);
1252338fd1498Szrj 	      code = (code == GE ? LE : GT);
1252438fd1498Szrj 	      continue;
1252538fd1498Szrj 	    }
1252638fd1498Szrj 	  break;
1252738fd1498Szrj 
1252838fd1498Szrj 	case XOR:
1252938fd1498Szrj 	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
1253038fd1498Szrj 	     if C is zero or B is a constant.  */
1253138fd1498Szrj 	  if (equality_comparison_p
1253238fd1498Szrj 	      && (tem = simplify_binary_operation (XOR, mode,
1253338fd1498Szrj 						   XEXP (op0, 1), op1)) != 0)
1253438fd1498Szrj 	    {
1253538fd1498Szrj 	      op0 = XEXP (op0, 0);
1253638fd1498Szrj 	      op1 = tem;
1253738fd1498Szrj 	      continue;
1253838fd1498Szrj 	    }
1253938fd1498Szrj 	  break;
1254038fd1498Szrj 
1254138fd1498Szrj 
1254238fd1498Szrj 	case IOR:
1254338fd1498Szrj 	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
1254438fd1498Szrj 	     iff X <= 0.  */
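	  /* E.g. X == 0 gives (-1 | 0) == -1 (sign bit set) and
	     X == 4 gives (3 | 4) == 7 (sign bit clear), so
	     (lt (ior (plus X (const_int -1)) X) 0) becomes
	     (le X (const_int 0)).  */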
1254538fd1498Szrj 	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
1254638fd1498Szrj 	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
1254738fd1498Szrj 	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
1254838fd1498Szrj 	    {
1254938fd1498Szrj 	      op0 = XEXP (op0, 1);
1255038fd1498Szrj 	      code = (code == GE ? GT : LE);
1255138fd1498Szrj 	      continue;
1255238fd1498Szrj 	    }
1255338fd1498Szrj 	  break;
1255438fd1498Szrj 
1255538fd1498Szrj 	case AND:
1255638fd1498Szrj 	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
1255738fd1498Szrj 	     will be converted to a ZERO_EXTRACT later.  */
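	  /* Both forms test bit X of Y; e.g.
	     (ne (and (ashift (const_int 1) X) Y) (const_int 0))
	     becomes
	     (ne (and (lshiftrt Y X) (const_int 1)) (const_int 0)).  */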
1255838fd1498Szrj 	  if (const_op == 0 && equality_comparison_p
1255938fd1498Szrj 	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
1256038fd1498Szrj 	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
1256138fd1498Szrj 	    {
1256238fd1498Szrj 	      op0 = gen_rtx_LSHIFTRT (mode, XEXP (op0, 1),
1256338fd1498Szrj 				      XEXP (XEXP (op0, 0), 1));
1256438fd1498Szrj 	      op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
1256538fd1498Szrj 	      continue;
1256638fd1498Szrj 	    }
1256738fd1498Szrj 
1256838fd1498Szrj 	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
1256938fd1498Szrj 	     zero and X is a comparison and C1 and C2 describe only bits set
1257038fd1498Szrj 	     in STORE_FLAG_VALUE, we can compare with X.  */
1257138fd1498Szrj 	  if (const_op == 0 && equality_comparison_p
1257238fd1498Szrj 	      && mode_width <= HOST_BITS_PER_WIDE_INT
1257338fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1))
1257438fd1498Szrj 	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
1257538fd1498Szrj 	      && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
1257638fd1498Szrj 	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
1257738fd1498Szrj 	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
1257838fd1498Szrj 	    {
1257938fd1498Szrj 	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
1258038fd1498Szrj 		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
1258138fd1498Szrj 	      if ((~STORE_FLAG_VALUE & mask) == 0
1258238fd1498Szrj 		  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
1258338fd1498Szrj 		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
1258438fd1498Szrj 			  && COMPARISON_P (tem))))
1258538fd1498Szrj 		{
1258638fd1498Szrj 		  op0 = XEXP (XEXP (op0, 0), 0);
1258738fd1498Szrj 		  continue;
1258838fd1498Szrj 		}
1258938fd1498Szrj 	    }
1259038fd1498Szrj 
1259138fd1498Szrj 	  /* If we are doing an equality comparison of an AND of a bit equal
1259238fd1498Szrj 	     to the sign bit, replace this with a LT or GE comparison of
1259338fd1498Szrj 	     the underlying value.  */
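	  /* E.g. in SImode,
	     (eq (and X (const_int 0x80000000)) (const_int 0)) becomes
	     (ge X (const_int 0)), and the NE form becomes
	     (lt X (const_int 0)).  */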
1259438fd1498Szrj 	  if (equality_comparison_p
1259538fd1498Szrj 	      && const_op == 0
1259638fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1))
1259738fd1498Szrj 	      && mode_width <= HOST_BITS_PER_WIDE_INT
1259838fd1498Szrj 	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
1259938fd1498Szrj 		  == HOST_WIDE_INT_1U << (mode_width - 1)))
1260038fd1498Szrj 	    {
1260138fd1498Szrj 	      op0 = XEXP (op0, 0);
1260238fd1498Szrj 	      code = (code == EQ ? GE : LT);
1260338fd1498Szrj 	      continue;
1260438fd1498Szrj 	    }
1260538fd1498Szrj 
1260638fd1498Szrj 	  /* If this AND operation is really a ZERO_EXTEND from a narrower
1260738fd1498Szrj 	     mode, the constant fits within that mode, and this is either an
1260838fd1498Szrj 	     equality or unsigned comparison, try to do this comparison in
1260938fd1498Szrj 	     the narrower mode.
1261038fd1498Szrj 
1261138fd1498Szrj 	     Note that in:
1261238fd1498Szrj 
1261338fd1498Szrj 	     (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
1261438fd1498Szrj 	     -> (ne:DI (reg:SI 4) (const_int 0))
1261538fd1498Szrj 
1261638fd1498Szrj 	     unless TARGET_TRULY_NOOP_TRUNCATION allows it or the register is
1261738fd1498Szrj 	     known to hold a value of the required mode, the
1261838fd1498Szrj 	     transformation is invalid.  */
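	  /* E.g. (eq (and:SI X (const_int 255)) (const_int 7)) can
	     become a QImode comparison of the low part of X with 7,
	     since 255 + 1 is a power of 2 and 7 fits in 8 bits.  */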
1261938fd1498Szrj 	  if ((equality_comparison_p || unsigned_comparison_p)
1262038fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1))
1262138fd1498Szrj 	      && (i = exact_log2 ((UINTVAL (XEXP (op0, 1))
1262238fd1498Szrj 				   & GET_MODE_MASK (mode))
1262338fd1498Szrj 				  + 1)) >= 0
1262438fd1498Szrj 	      && const_op >> i == 0
1262538fd1498Szrj 	      && int_mode_for_size (i, 1).exists (&tmode))
1262638fd1498Szrj 	    {
1262738fd1498Szrj 	      op0 = gen_lowpart_or_truncate (tmode, XEXP (op0, 0));
1262838fd1498Szrj 	      continue;
1262938fd1498Szrj 	    }
1263038fd1498Szrj 
1263138fd1498Szrj 	  /* If this is (and:M1 (subreg:M1 X:M2 0) (const_int C1)) where C1
1263238fd1498Szrj 	     fits in both M1 and M2 and the SUBREG is either paradoxical
1263338fd1498Szrj 	     or represents the low part, permute the SUBREG and the AND
1263438fd1498Szrj 	     and try again.  */
1263538fd1498Szrj 	  if (GET_CODE (XEXP (op0, 0)) == SUBREG
1263638fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1)))
1263738fd1498Szrj 	    {
1263838fd1498Szrj 	      unsigned HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
1263938fd1498Szrj 	      /* Require an integral mode, to avoid creating something like
1264038fd1498Szrj 		 (AND:SF ...).  */
1264138fd1498Szrj 	      if ((is_a <scalar_int_mode>
1264238fd1498Szrj 		   (GET_MODE (SUBREG_REG (XEXP (op0, 0))), &tmode))
1264338fd1498Szrj 		  /* It is unsafe to commute the AND into the SUBREG if the
1264438fd1498Szrj 		     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
1264538fd1498Szrj 		     not defined.  As originally written the upper bits
1264638fd1498Szrj 		     have a defined value due to the AND operation.
1264738fd1498Szrj 		     However, if we commute the AND inside the SUBREG then
1264838fd1498Szrj 		     they no longer have defined values and the meaning of
1264938fd1498Szrj 		     the code has been changed.
1265038fd1498Szrj 		     Also C1 should not change value in the smaller mode,
1265138fd1498Szrj 		     see PR67028 (a positive C1 can become negative in the
1265238fd1498Szrj 		     smaller mode, so that the AND does no longer mask the
1265338fd1498Szrj 		     upper bits).  */
1265438fd1498Szrj 		  && ((WORD_REGISTER_OPERATIONS
1265538fd1498Szrj 		       && mode_width > GET_MODE_PRECISION (tmode)
1265638fd1498Szrj 		       && mode_width <= BITS_PER_WORD
1265738fd1498Szrj 		       && trunc_int_for_mode (c1, tmode) == (HOST_WIDE_INT) c1)
1265838fd1498Szrj 		      || (mode_width <= GET_MODE_PRECISION (tmode)
1265938fd1498Szrj 			  && subreg_lowpart_p (XEXP (op0, 0))))
1266038fd1498Szrj 		  && mode_width <= HOST_BITS_PER_WIDE_INT
1266138fd1498Szrj 		  && HWI_COMPUTABLE_MODE_P (tmode)
1266238fd1498Szrj 		  && (c1 & ~mask) == 0
1266338fd1498Szrj 		  && (c1 & ~GET_MODE_MASK (tmode)) == 0
1266438fd1498Szrj 		  && c1 != mask
1266538fd1498Szrj 		  && c1 != GET_MODE_MASK (tmode))
1266638fd1498Szrj 		{
1266738fd1498Szrj 		  op0 = simplify_gen_binary (AND, tmode,
1266838fd1498Szrj 					     SUBREG_REG (XEXP (op0, 0)),
1266938fd1498Szrj 					     gen_int_mode (c1, tmode));
1267038fd1498Szrj 		  op0 = gen_lowpart (mode, op0);
1267138fd1498Szrj 		  continue;
1267238fd1498Szrj 		}
1267338fd1498Szrj 	    }
1267438fd1498Szrj 
1267538fd1498Szrj 	  /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
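	  /* The low bit of (not X) is the complement of the low bit
	     of X, so the polarity of the test simply flips.  */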
1267638fd1498Szrj 	  if (const_op == 0 && equality_comparison_p
1267738fd1498Szrj 	      && XEXP (op0, 1) == const1_rtx
1267838fd1498Szrj 	      && GET_CODE (XEXP (op0, 0)) == NOT)
1267938fd1498Szrj 	    {
1268038fd1498Szrj 	      op0 = simplify_and_const_int (NULL_RTX, mode,
1268138fd1498Szrj 					    XEXP (XEXP (op0, 0), 0), 1);
1268238fd1498Szrj 	      code = (code == NE ? EQ : NE);
1268338fd1498Szrj 	      continue;
1268438fd1498Szrj 	    }
1268538fd1498Szrj 
1268638fd1498Szrj 	  /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
1268738fd1498Szrj 	     (eq (and (lshiftrt X) 1) 0).
1268838fd1498Szrj 	     Also handle the case where (not X) is expressed using xor.  */
1268938fd1498Szrj 	  if (const_op == 0 && equality_comparison_p
1269038fd1498Szrj 	      && XEXP (op0, 1) == const1_rtx
1269138fd1498Szrj 	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
1269238fd1498Szrj 	    {
1269338fd1498Szrj 	      rtx shift_op = XEXP (XEXP (op0, 0), 0);
1269438fd1498Szrj 	      rtx shift_count = XEXP (XEXP (op0, 0), 1);
1269538fd1498Szrj 
1269638fd1498Szrj 	      if (GET_CODE (shift_op) == NOT
1269738fd1498Szrj 		  || (GET_CODE (shift_op) == XOR
1269838fd1498Szrj 		      && CONST_INT_P (XEXP (shift_op, 1))
1269938fd1498Szrj 		      && CONST_INT_P (shift_count)
1270038fd1498Szrj 		      && HWI_COMPUTABLE_MODE_P (mode)
1270138fd1498Szrj 		      && (UINTVAL (XEXP (shift_op, 1))
1270238fd1498Szrj 			  == HOST_WIDE_INT_1U
1270338fd1498Szrj 			       << INTVAL (shift_count))))
1270438fd1498Szrj 		{
1270538fd1498Szrj 		  op0
1270638fd1498Szrj 		    = gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count);
1270738fd1498Szrj 		  op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
1270838fd1498Szrj 		  code = (code == NE ? EQ : NE);
1270938fd1498Szrj 		  continue;
1271038fd1498Szrj 		}
1271138fd1498Szrj 	    }
1271238fd1498Szrj 	  break;
1271338fd1498Szrj 
1271438fd1498Szrj 	case ASHIFT:
1271538fd1498Szrj 	  /* If we have (compare (ashift FOO N) (const_int C)) and
1271638fd1498Szrj 	     the high order N bits of FOO (N+1 if an inequality comparison)
1271738fd1498Szrj 	     are known to be zero, we can do this by comparing FOO with C
1271838fd1498Szrj 	     shifted right N bits so long as the low-order N bits of C are
1271938fd1498Szrj 	     zero.  */
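	  /* E.g. (eq (ashift:SI X (const_int 4)) (const_int 0x50))
	     becomes (eq X (const_int 5)) when the four high-order bits
	     of X are known to be zero.  */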
1272038fd1498Szrj 	  if (CONST_INT_P (XEXP (op0, 1))
1272138fd1498Szrj 	      && INTVAL (XEXP (op0, 1)) >= 0
1272238fd1498Szrj 	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
1272338fd1498Szrj 		  < HOST_BITS_PER_WIDE_INT)
1272438fd1498Szrj 	      && (((unsigned HOST_WIDE_INT) const_op
1272538fd1498Szrj 		   & ((HOST_WIDE_INT_1U << INTVAL (XEXP (op0, 1)))
1272638fd1498Szrj 		      - 1)) == 0)
1272738fd1498Szrj 	      && mode_width <= HOST_BITS_PER_WIDE_INT
1272838fd1498Szrj 	      && (nonzero_bits (XEXP (op0, 0), mode)
1272938fd1498Szrj 		  & ~(mask >> (INTVAL (XEXP (op0, 1))
1273038fd1498Szrj 			       + ! equality_comparison_p))) == 0)
1273138fd1498Szrj 	    {
1273238fd1498Szrj 	      /* We must perform a logical shift, not an arithmetic one,
1273338fd1498Szrj 		 as we want the top N bits of C to be zero.  */
1273438fd1498Szrj 	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
1273538fd1498Szrj 
1273638fd1498Szrj 	      temp >>= INTVAL (XEXP (op0, 1));
1273738fd1498Szrj 	      op1 = gen_int_mode (temp, mode);
1273838fd1498Szrj 	      op0 = XEXP (op0, 0);
1273938fd1498Szrj 	      continue;
1274038fd1498Szrj 	    }
1274138fd1498Szrj 
1274238fd1498Szrj 	  /* If we are doing a sign bit comparison, it means we are testing
1274338fd1498Szrj 	     a particular bit.  Convert it to the appropriate AND.  */
1274438fd1498Szrj 	  if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
1274538fd1498Szrj 	      && mode_width <= HOST_BITS_PER_WIDE_INT)
1274638fd1498Szrj 	    {
1274738fd1498Szrj 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
1274838fd1498Szrj 					    (HOST_WIDE_INT_1U
1274938fd1498Szrj 					     << (mode_width - 1
1275038fd1498Szrj 						 - INTVAL (XEXP (op0, 1)))));
1275138fd1498Szrj 	      code = (code == LT ? NE : EQ);
1275238fd1498Szrj 	      continue;
1275338fd1498Szrj 	    }
1275438fd1498Szrj 
1275538fd1498Szrj 	  /* If this is an equality comparison with zero and we are shifting
1275638fd1498Szrj 	     the low bit to the sign bit, we can convert this to an AND of the
1275738fd1498Szrj 	     low-order bit.  */
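	  /* E.g. (eq (ashift:SI X (const_int 31)) (const_int 0)) only
	     depends on the low bit of X, so it becomes
	     (eq (and X (const_int 1)) (const_int 0)).  */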
1275838fd1498Szrj 	  if (const_op == 0 && equality_comparison_p
1275938fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1))
1276038fd1498Szrj 	      && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
1276138fd1498Szrj 	    {
1276238fd1498Szrj 	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), 1);
1276338fd1498Szrj 	      continue;
1276438fd1498Szrj 	    }
1276538fd1498Szrj 	  break;
1276638fd1498Szrj 
1276738fd1498Szrj 	case ASHIFTRT:
1276838fd1498Szrj 	  /* If this is an equality comparison with zero, we can do this
1276938fd1498Szrj 	     as a logical shift, which might be much simpler.  */
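	  /* Whether the copies of the sign bit shifted in are ones or
	     zeros cannot change a comparison with zero; e.g.
	     (eq (ashiftrt X 5) 0) is equivalent to
	     (eq (lshiftrt X 5) 0).  */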
1277038fd1498Szrj 	  if (equality_comparison_p && const_op == 0
1277138fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1)))
1277238fd1498Szrj 	    {
1277338fd1498Szrj 	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
1277438fd1498Szrj 					  XEXP (op0, 0),
1277538fd1498Szrj 					  INTVAL (XEXP (op0, 1)));
1277638fd1498Szrj 	      continue;
1277738fd1498Szrj 	    }
1277838fd1498Szrj 
1277938fd1498Szrj 	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
1278038fd1498Szrj 	     do the comparison in a narrower mode.  */
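	  /* E.g. in SImode, (ashiftrt (ashift X 24) 24) is the sign
	     extension of the low 8 bits of X, so a signed comparison
	     with a constant in [-128, 127] can be done on the QImode
	     low part of X directly.  */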
1278138fd1498Szrj 	  if (! unsigned_comparison_p
1278238fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1))
1278338fd1498Szrj 	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
1278438fd1498Szrj 	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
1278538fd1498Szrj 	      && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
1278638fd1498Szrj 		  .exists (&tmode))
1278738fd1498Szrj 	      && (((unsigned HOST_WIDE_INT) const_op
1278838fd1498Szrj 		   + (GET_MODE_MASK (tmode) >> 1) + 1)
1278938fd1498Szrj 		  <= GET_MODE_MASK (tmode)))
1279038fd1498Szrj 	    {
1279138fd1498Szrj 	      op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
1279238fd1498Szrj 	      continue;
1279338fd1498Szrj 	    }
1279438fd1498Szrj 
1279538fd1498Szrj 	  /* Likewise if OP0 is a PLUS of a sign extension with a
1279638fd1498Szrj 	     constant, which is usually represented with the PLUS
1279738fd1498Szrj 	     between the shifts.  */
1279838fd1498Szrj 	  if (! unsigned_comparison_p
1279938fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1))
1280038fd1498Szrj 	      && GET_CODE (XEXP (op0, 0)) == PLUS
1280138fd1498Szrj 	      && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
1280238fd1498Szrj 	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
1280338fd1498Szrj 	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
1280438fd1498Szrj 	      && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
1280538fd1498Szrj 		  .exists (&tmode))
1280638fd1498Szrj 	      && (((unsigned HOST_WIDE_INT) const_op
1280738fd1498Szrj 		   + (GET_MODE_MASK (tmode) >> 1) + 1)
1280838fd1498Szrj 		  <= GET_MODE_MASK (tmode)))
1280938fd1498Szrj 	    {
1281038fd1498Szrj 	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
1281138fd1498Szrj 	      rtx add_const = XEXP (XEXP (op0, 0), 1);
1281238fd1498Szrj 	      rtx new_const = simplify_gen_binary (ASHIFTRT, mode,
1281338fd1498Szrj 						   add_const, XEXP (op0, 1));
1281438fd1498Szrj 
1281538fd1498Szrj 	      op0 = simplify_gen_binary (PLUS, tmode,
1281638fd1498Szrj 					 gen_lowpart (tmode, inner),
1281738fd1498Szrj 					 new_const);
1281838fd1498Szrj 	      continue;
1281938fd1498Szrj 	    }
1282038fd1498Szrj 
1282138fd1498Szrj 	  /* FALLTHROUGH */
1282238fd1498Szrj 	case LSHIFTRT:
1282338fd1498Szrj 	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
1282438fd1498Szrj 	     the low order N bits of FOO are known to be zero, we can do this
1282538fd1498Szrj 	     by comparing FOO with C shifted left N bits so long as no
1282638fd1498Szrj 	     overflow occurs.  Even if the low order N bits of FOO aren't known
1282738fd1498Szrj 	     to be zero, if the comparison is >= or < we can use the same
1282838fd1498Szrj 	     optimization and for > or <= by setting all the low
1282938fd1498Szrj 	     order N bits in the comparison constant.  */
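	  /* E.g. (ltu (lshiftrt:SI X (const_int 4)) (const_int 7))
	     becomes (ltu X (const_int 0x70)), and the GTU form becomes
	     (gtu X (const_int 0x7f)) by also setting the low 4 bits of
	     the constant.  */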
1283038fd1498Szrj 	  if (CONST_INT_P (XEXP (op0, 1))
1283138fd1498Szrj 	      && INTVAL (XEXP (op0, 1)) > 0
1283238fd1498Szrj 	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
1283338fd1498Szrj 	      && mode_width <= HOST_BITS_PER_WIDE_INT
1283438fd1498Szrj 	      && (((unsigned HOST_WIDE_INT) const_op
1283538fd1498Szrj 		   + (GET_CODE (op0) != LSHIFTRT
1283638fd1498Szrj 		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
1283738fd1498Szrj 			 + 1)
1283838fd1498Szrj 		      : 0))
1283938fd1498Szrj 		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
1284038fd1498Szrj 	    {
1284138fd1498Szrj 	      unsigned HOST_WIDE_INT low_bits
1284238fd1498Szrj 		= (nonzero_bits (XEXP (op0, 0), mode)
1284338fd1498Szrj 		   & ((HOST_WIDE_INT_1U
1284438fd1498Szrj 		       << INTVAL (XEXP (op0, 1))) - 1));
1284538fd1498Szrj 	      if (low_bits == 0 || !equality_comparison_p)
1284638fd1498Szrj 		{
1284738fd1498Szrj 		  /* If the shift was logical, then we must make the condition
1284838fd1498Szrj 		     unsigned.  */
1284938fd1498Szrj 		  if (GET_CODE (op0) == LSHIFTRT)
1285038fd1498Szrj 		    code = unsigned_condition (code);
1285138fd1498Szrj 
1285238fd1498Szrj 		  const_op = (unsigned HOST_WIDE_INT) const_op
1285338fd1498Szrj 			      << INTVAL (XEXP (op0, 1));
1285438fd1498Szrj 		  if (low_bits != 0
1285538fd1498Szrj 		      && (code == GT || code == GTU
1285638fd1498Szrj 			  || code == LE || code == LEU))
1285738fd1498Szrj 		    const_op
1285838fd1498Szrj 		      |= ((HOST_WIDE_INT_1 << INTVAL (XEXP (op0, 1))) - 1);
1285938fd1498Szrj 		  op1 = GEN_INT (const_op);
1286038fd1498Szrj 		  op0 = XEXP (op0, 0);
1286138fd1498Szrj 		  continue;
1286238fd1498Szrj 		}
1286338fd1498Szrj 	    }
1286438fd1498Szrj 
1286538fd1498Szrj 	  /* If we are using this shift to extract just the sign bit, we
1286638fd1498Szrj 	     can replace this with an LT or GE comparison.  */
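	  /* E.g. (ne (lshiftrt:SI X (const_int 31)) (const_int 0))
	     becomes (lt X (const_int 0)), and the EQ form becomes
	     (ge X (const_int 0)).  */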
1286738fd1498Szrj 	  if (const_op == 0
1286838fd1498Szrj 	      && (equality_comparison_p || sign_bit_comparison_p)
1286938fd1498Szrj 	      && CONST_INT_P (XEXP (op0, 1))
1287038fd1498Szrj 	      && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
1287138fd1498Szrj 	    {
1287238fd1498Szrj 	      op0 = XEXP (op0, 0);
1287338fd1498Szrj 	      code = (code == NE || code == GT ? LT : GE);
1287438fd1498Szrj 	      continue;
1287538fd1498Szrj 	    }
1287638fd1498Szrj 	  break;
1287738fd1498Szrj 
1287838fd1498Szrj 	default:
1287938fd1498Szrj 	  break;
1288038fd1498Szrj 	}
1288138fd1498Szrj 
1288238fd1498Szrj       break;
1288338fd1498Szrj     }
1288438fd1498Szrj 
1288538fd1498Szrj   /* Now make any compound operations involved in this comparison.  Then,
1288638fd1498Szrj      check for an outermost SUBREG on OP0 that is not doing anything or is
1288738fd1498Szrj      paradoxical.  The latter transformation must only be performed when
1288838fd1498Szrj      it is known that the "extra" bits will be the same in op0 and op1 or
1288938fd1498Szrj      that they don't matter.  There are three cases to consider:
1289038fd1498Szrj 
1289138fd1498Szrj      1. SUBREG_REG (op0) is a register.  In this case the bits are don't
1289238fd1498Szrj      care bits and we can assume they have any convenient value.  So
1289338fd1498Szrj      making the transformation is safe.
1289438fd1498Szrj 
1289538fd1498Szrj      2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is UNKNOWN.
1289638fd1498Szrj      In this case the upper bits of op0 are undefined.  We should not make
1289738fd1498Szrj      the simplification in that case as we do not know the contents of
1289838fd1498Szrj      those bits.
1289938fd1498Szrj 
1290038fd1498Szrj      3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not UNKNOWN.
1290138fd1498Szrj      In that case we know those bits are zeros or ones.  We must also be
1290238fd1498Szrj      sure that they are the same as the upper bits of op1.
1290338fd1498Szrj 
1290438fd1498Szrj      We can never remove a SUBREG for a non-equality comparison because
1290538fd1498Szrj      the sign bit is in a different place in the underlying object.  */
1290638fd1498Szrj 
1290738fd1498Szrj   rtx_code op0_mco_code = SET;
1290838fd1498Szrj   if (op1 == const0_rtx)
1290938fd1498Szrj     op0_mco_code = code == NE || code == EQ ? EQ : COMPARE;
1291038fd1498Szrj 
1291138fd1498Szrj   op0 = make_compound_operation (op0, op0_mco_code);
1291238fd1498Szrj   op1 = make_compound_operation (op1, SET);
1291338fd1498Szrj 
1291438fd1498Szrj   if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
1291538fd1498Szrj       && is_int_mode (GET_MODE (op0), &mode)
1291638fd1498Szrj       && is_int_mode (GET_MODE (SUBREG_REG (op0)), &inner_mode)
1291738fd1498Szrj       && (code == NE || code == EQ))
1291838fd1498Szrj     {
1291938fd1498Szrj       if (paradoxical_subreg_p (op0))
1292038fd1498Szrj 	{
1292138fd1498Szrj 	  /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
1292238fd1498Szrj 	     implemented.  */
1292338fd1498Szrj 	  if (REG_P (SUBREG_REG (op0)))
1292438fd1498Szrj 	    {
1292538fd1498Szrj 	      op0 = SUBREG_REG (op0);
1292638fd1498Szrj 	      op1 = gen_lowpart (inner_mode, op1);
1292738fd1498Szrj 	    }
1292838fd1498Szrj 	}
1292938fd1498Szrj       else if (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
1293038fd1498Szrj 	       && (nonzero_bits (SUBREG_REG (op0), inner_mode)
1293138fd1498Szrj 		   & ~GET_MODE_MASK (mode)) == 0)
1293238fd1498Szrj 	{
1293338fd1498Szrj 	  tem = gen_lowpart (inner_mode, op1);
1293438fd1498Szrj 
1293538fd1498Szrj 	  if ((nonzero_bits (tem, inner_mode) & ~GET_MODE_MASK (mode)) == 0)
1293638fd1498Szrj 	    op0 = SUBREG_REG (op0), op1 = tem;
1293738fd1498Szrj 	}
1293838fd1498Szrj     }
1293938fd1498Szrj 
1294038fd1498Szrj   /* We now do the opposite procedure: Some machines don't have compare
1294138fd1498Szrj      insns in all modes.  If OP0's mode is an integer mode smaller than a
1294238fd1498Szrj      word and we can't do a compare in that mode, see if there is a larger
1294338fd1498Szrj      mode for which we can do the compare.  There are a number of cases in
1294438fd1498Szrj      which we can use the wider mode.  */
1294538fd1498Szrj 
1294638fd1498Szrj   if (is_int_mode (GET_MODE (op0), &mode)
1294738fd1498Szrj       && GET_MODE_SIZE (mode) < UNITS_PER_WORD
1294838fd1498Szrj       && ! have_insn_for (COMPARE, mode))
1294938fd1498Szrj     FOR_EACH_WIDER_MODE (tmode_iter, mode)
1295038fd1498Szrj       {
1295138fd1498Szrj 	tmode = tmode_iter.require ();
1295238fd1498Szrj 	if (!HWI_COMPUTABLE_MODE_P (tmode))
1295338fd1498Szrj 	  break;
1295438fd1498Szrj 	if (have_insn_for (COMPARE, tmode))
1295538fd1498Szrj 	  {
1295638fd1498Szrj 	    int zero_extended;
1295738fd1498Szrj 
1295838fd1498Szrj 	    /* If this is a test for negative, we can make an explicit
1295938fd1498Szrj 	       test of the sign bit.  Test this first so we can use
1296038fd1498Szrj 	       a paradoxical subreg to extend OP0.  */
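	    /* E.g. (illustrative) if QImode has no compare pattern, a
	       QImode test for negative can be done as
	       (ne (and:SI (subreg:SI X 0) (const_int 0x80))
		   (const_int 0))
	       in a wider mode.  */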
1296138fd1498Szrj 
1296238fd1498Szrj 	    if (op1 == const0_rtx && (code == LT || code == GE)
1296338fd1498Szrj 		&& HWI_COMPUTABLE_MODE_P (mode))
1296438fd1498Szrj 	      {
1296538fd1498Szrj 		unsigned HOST_WIDE_INT sign
1296638fd1498Szrj 		  = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode) - 1);
1296738fd1498Szrj 		op0 = simplify_gen_binary (AND, tmode,
1296838fd1498Szrj 					   gen_lowpart (tmode, op0),
1296938fd1498Szrj 					   gen_int_mode (sign, tmode));
1297038fd1498Szrj 		code = (code == LT) ? NE : EQ;
1297138fd1498Szrj 		break;
1297238fd1498Szrj 	      }
1297338fd1498Szrj 
1297438fd1498Szrj 	    /* If the only nonzero bits in OP0 and OP1 are those in the
1297538fd1498Szrj 	       narrower mode and this is an equality or unsigned comparison,
1297638fd1498Szrj 	       we can use the wider mode.  Similarly for sign-extended
1297738fd1498Szrj 	       values, in which case it is true for all comparisons.  */
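	    /* E.g. an EQ of two QImode values whose nonzero bits fit
	       in the narrow mode can be done as a comparison of their
	       zero-extensions in the wider mode.  */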
1297838fd1498Szrj 	    zero_extended = ((code == EQ || code == NE
1297938fd1498Szrj 			      || code == GEU || code == GTU
1298038fd1498Szrj 			      || code == LEU || code == LTU)
1298138fd1498Szrj 			     && (nonzero_bits (op0, tmode)
1298238fd1498Szrj 				 & ~GET_MODE_MASK (mode)) == 0
1298338fd1498Szrj 			     && ((CONST_INT_P (op1)
1298438fd1498Szrj 				  || (nonzero_bits (op1, tmode)
1298538fd1498Szrj 				      & ~GET_MODE_MASK (mode)) == 0)));
1298638fd1498Szrj 
1298738fd1498Szrj 	    if (zero_extended
1298838fd1498Szrj 		|| ((num_sign_bit_copies (op0, tmode)
1298938fd1498Szrj 		     > (unsigned int) (GET_MODE_PRECISION (tmode)
1299038fd1498Szrj 				       - GET_MODE_PRECISION (mode)))
1299138fd1498Szrj 		    && (num_sign_bit_copies (op1, tmode)
1299238fd1498Szrj 			> (unsigned int) (GET_MODE_PRECISION (tmode)
1299338fd1498Szrj 					  - GET_MODE_PRECISION (mode)))))
1299438fd1498Szrj 	      {
1299538fd1498Szrj 		/* If OP0 is an AND and we don't have an AND in MODE either,
1299638fd1498Szrj 		   make a new AND in the proper mode.  */
1299738fd1498Szrj 		if (GET_CODE (op0) == AND
1299838fd1498Szrj 		    && !have_insn_for (AND, mode))
1299938fd1498Szrj 		  op0 = simplify_gen_binary (AND, tmode,
1300038fd1498Szrj 					     gen_lowpart (tmode,
1300138fd1498Szrj 							  XEXP (op0, 0)),
1300238fd1498Szrj 					     gen_lowpart (tmode,
1300338fd1498Szrj 							  XEXP (op0, 1)));
1300438fd1498Szrj 		else
1300538fd1498Szrj 		  {
1300638fd1498Szrj 		    if (zero_extended)
1300738fd1498Szrj 		      {
1300838fd1498Szrj 			op0 = simplify_gen_unary (ZERO_EXTEND, tmode,
1300938fd1498Szrj 						  op0, mode);
1301038fd1498Szrj 			op1 = simplify_gen_unary (ZERO_EXTEND, tmode,
1301138fd1498Szrj 						  op1, mode);
1301238fd1498Szrj 		      }
1301338fd1498Szrj 		    else
1301438fd1498Szrj 		      {
1301538fd1498Szrj 			op0 = simplify_gen_unary (SIGN_EXTEND, tmode,
1301638fd1498Szrj 						  op0, mode);
1301738fd1498Szrj 			op1 = simplify_gen_unary (SIGN_EXTEND, tmode,
1301838fd1498Szrj 						  op1, mode);
1301938fd1498Szrj 		      }
1302038fd1498Szrj 		    break;
1302138fd1498Szrj 		  }
1302238fd1498Szrj 	      }
1302338fd1498Szrj 	  }
1302438fd1498Szrj       }
1302538fd1498Szrj 
1302638fd1498Szrj   /* We may have changed the comparison operands.  Re-canonicalize.  */
1302738fd1498Szrj   if (swap_commutative_operands_p (op0, op1))
1302838fd1498Szrj     {
1302938fd1498Szrj       std::swap (op0, op1);
1303038fd1498Szrj       code = swap_condition (code);
1303138fd1498Szrj     }
1303238fd1498Szrj 
1303338fd1498Szrj   /* If this machine only supports a subset of valid comparisons, see if we
1303438fd1498Szrj      can convert an unsupported one into a supported one.  */
1303538fd1498Szrj   target_canonicalize_comparison (&code, &op0, &op1, 0);
1303638fd1498Szrj 
1303738fd1498Szrj   *pop0 = op0;
1303838fd1498Szrj   *pop1 = op1;
1303938fd1498Szrj 
1304038fd1498Szrj   return code;
1304138fd1498Szrj }
1304238fd1498Szrj 
1304338fd1498Szrj /* Utility function for record_value_for_reg.  Count number of
1304438fd1498Szrj    rtxs in X.  */
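/* A shared operand is deliberately counted twice: for (plus R R) this
   returns 1 + 2 * count_rtxs (R) == 3, approximating the size the
   expression would have if the sharing were lost.  */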
1304538fd1498Szrj static int
1304638fd1498Szrj count_rtxs (rtx x)
1304738fd1498Szrj {
1304838fd1498Szrj   enum rtx_code code = GET_CODE (x);
1304938fd1498Szrj   const char *fmt;
1305038fd1498Szrj   int i, j, ret = 1;
1305138fd1498Szrj 
1305238fd1498Szrj   if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
1305338fd1498Szrj       || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
1305438fd1498Szrj     {
1305538fd1498Szrj       rtx x0 = XEXP (x, 0);
1305638fd1498Szrj       rtx x1 = XEXP (x, 1);
1305738fd1498Szrj 
1305838fd1498Szrj       if (x0 == x1)
1305938fd1498Szrj 	return 1 + 2 * count_rtxs (x0);
1306038fd1498Szrj 
1306138fd1498Szrj       if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
1306238fd1498Szrj 	   || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
1306338fd1498Szrj 	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
1306438fd1498Szrj 	return 2 + 2 * count_rtxs (x0)
1306538fd1498Szrj 	       + count_rtxs (x0 == XEXP (x1, 0)
1306638fd1498Szrj 			     ? XEXP (x1, 1) : XEXP (x1, 0));
1306738fd1498Szrj 
1306838fd1498Szrj       if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
1306938fd1498Szrj 	   || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
1307038fd1498Szrj 	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
1307138fd1498Szrj 	return 2 + 2 * count_rtxs (x1)
1307238fd1498Szrj 	       + count_rtxs (x1 == XEXP (x0, 0)
1307338fd1498Szrj 			     ? XEXP (x0, 1) : XEXP (x0, 0));
1307438fd1498Szrj     }
1307538fd1498Szrj 
1307638fd1498Szrj   fmt = GET_RTX_FORMAT (code);
1307738fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1307838fd1498Szrj     if (fmt[i] == 'e')
1307938fd1498Szrj       ret += count_rtxs (XEXP (x, i));
1308038fd1498Szrj     else if (fmt[i] == 'E')
1308138fd1498Szrj       for (j = 0; j < XVECLEN (x, i); j++)
1308238fd1498Szrj 	ret += count_rtxs (XVECEXP (x, i, j));
1308338fd1498Szrj 
1308438fd1498Szrj   return ret;
1308538fd1498Szrj }
1308638fd1498Szrj 
1308738fd1498Szrj /* Utility function for the following routine.  Called when X is part of a
1308838fd1498Szrj    value being stored into last_set_value.  Sets last_set_table_tick
1308938fd1498Szrj    for each register mentioned.  Similar to mention_regs in cse.c.  */
1309038fd1498Szrj 
1309138fd1498Szrj static void
1309238fd1498Szrj update_table_tick (rtx x)
1309338fd1498Szrj {
1309438fd1498Szrj   enum rtx_code code = GET_CODE (x);
1309538fd1498Szrj   const char *fmt = GET_RTX_FORMAT (code);
1309638fd1498Szrj   int i, j;
1309738fd1498Szrj 
1309838fd1498Szrj   if (code == REG)
1309938fd1498Szrj     {
1310038fd1498Szrj       unsigned int regno = REGNO (x);
1310138fd1498Szrj       unsigned int endregno = END_REGNO (x);
1310238fd1498Szrj       unsigned int r;
1310338fd1498Szrj 
1310438fd1498Szrj       for (r = regno; r < endregno; r++)
1310538fd1498Szrj 	{
1310638fd1498Szrj 	  reg_stat_type *rsp = &reg_stat[r];
1310738fd1498Szrj 	  rsp->last_set_table_tick = label_tick;
1310838fd1498Szrj 	}
1310938fd1498Szrj 
1311038fd1498Szrj       return;
1311138fd1498Szrj     }
1311238fd1498Szrj 
1311338fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1311438fd1498Szrj     if (fmt[i] == 'e')
1311538fd1498Szrj       {
1311638fd1498Szrj 	/* Check for identical subexpressions.  If x contains
1311738fd1498Szrj 	   identical subexpression we only have to traverse one of
1311838fd1498Szrj 	   them.  */
1311938fd1498Szrj 	if (i == 0 && ARITHMETIC_P (x))
1312038fd1498Szrj 	  {
1312138fd1498Szrj 	    /* Note that at this point x1 has already been
1312238fd1498Szrj 	       processed.  */
1312338fd1498Szrj 	    rtx x0 = XEXP (x, 0);
1312438fd1498Szrj 	    rtx x1 = XEXP (x, 1);
1312538fd1498Szrj 
1312638fd1498Szrj 	    /* If x0 and x1 are identical then there is no need to
1312738fd1498Szrj 	       process x0.  */
1312838fd1498Szrj 	    if (x0 == x1)
1312938fd1498Szrj 	      break;
1313038fd1498Szrj 
1313138fd1498Szrj 	    /* If x0 is identical to a subexpression of x1 then while
1313238fd1498Szrj 	       processing x1, x0 has already been processed.  Thus we
1313338fd1498Szrj 	       are done with x.  */
1313438fd1498Szrj 	    if (ARITHMETIC_P (x1)
1313538fd1498Szrj 		&& (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
1313638fd1498Szrj 	      break;
1313738fd1498Szrj 
1313838fd1498Szrj 	    /* If x1 is identical to a subexpression of x0 then we
1313938fd1498Szrj 	       still have to process the rest of x0.  */
1314038fd1498Szrj 	    if (ARITHMETIC_P (x0)
1314138fd1498Szrj 		&& (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
1314238fd1498Szrj 	      {
1314338fd1498Szrj 		update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
1314438fd1498Szrj 		break;
1314538fd1498Szrj 	      }
1314638fd1498Szrj 	  }
1314738fd1498Szrj 
1314838fd1498Szrj 	update_table_tick (XEXP (x, i));
1314938fd1498Szrj       }
1315038fd1498Szrj     else if (fmt[i] == 'E')
1315138fd1498Szrj       for (j = 0; j < XVECLEN (x, i); j++)
1315238fd1498Szrj 	update_table_tick (XVECEXP (x, i, j));
1315338fd1498Szrj }
1315438fd1498Szrj 
1315538fd1498Szrj /* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
1315638fd1498Szrj    are saying that the register is clobbered and we no longer know its
1315738fd1498Szrj    value.  If INSN is zero, don't update reg_stat[].last_set; this is
1315838fd1498Szrj    only permitted with VALUE also zero and is used to invalidate the
1315938fd1498Szrj    register.  */
1316038fd1498Szrj 
1316138fd1498Szrj static void
1316238fd1498Szrj record_value_for_reg (rtx reg, rtx_insn *insn, rtx value)
1316338fd1498Szrj {
1316438fd1498Szrj   unsigned int regno = REGNO (reg);
1316538fd1498Szrj   unsigned int endregno = END_REGNO (reg);
1316638fd1498Szrj   unsigned int i;
1316738fd1498Szrj   reg_stat_type *rsp;
1316838fd1498Szrj 
1316938fd1498Szrj   /* If VALUE contains REG and we have a previous value for REG, substitute
1317038fd1498Szrj      the previous value.  */
1317138fd1498Szrj   if (value && insn && reg_overlap_mentioned_p (reg, value))
1317238fd1498Szrj     {
1317338fd1498Szrj       rtx tem;
1317438fd1498Szrj 
1317538fd1498Szrj       /* Set things up so get_last_value is allowed to see anything set up to
1317638fd1498Szrj 	 our insn.  */
1317738fd1498Szrj       subst_low_luid = DF_INSN_LUID (insn);
1317838fd1498Szrj       tem = get_last_value (reg);
1317938fd1498Szrj 
1318038fd1498Szrj       /* If TEM is simply a binary operation with two CLOBBERs as operands,
1318138fd1498Szrj 	 it isn't going to be useful and will take a lot of time to process,
1318238fd1498Szrj 	 so just use the CLOBBER.  */
1318338fd1498Szrj 
1318438fd1498Szrj       if (tem)
1318538fd1498Szrj 	{
1318638fd1498Szrj 	  if (ARITHMETIC_P (tem)
1318738fd1498Szrj 	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
1318838fd1498Szrj 	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
1318938fd1498Szrj 	    tem = XEXP (tem, 0);
1319038fd1498Szrj 	  else if (count_occurrences (value, reg, 1) >= 2)
1319138fd1498Szrj 	    {
1319238fd1498Szrj 	      /* If there are two or more occurrences of REG in VALUE,
1319338fd1498Szrj 		 prevent the value from growing too much.  */
1319438fd1498Szrj 	      if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
1319538fd1498Szrj 		tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
1319638fd1498Szrj 	    }
1319738fd1498Szrj 
1319838fd1498Szrj 	  value = replace_rtx (copy_rtx (value), reg, tem);
1319938fd1498Szrj 	}
1320038fd1498Szrj     }
1320138fd1498Szrj 
1320238fd1498Szrj   /* For each register modified, show we don't know its value, that
1320338fd1498Szrj      we don't know about its bitwise content, that its value has been
1320438fd1498Szrj      updated, and that we don't know the location of the death of the
1320538fd1498Szrj      register.  */
1320638fd1498Szrj   for (i = regno; i < endregno; i++)
1320738fd1498Szrj     {
1320838fd1498Szrj       rsp = &reg_stat[i];
1320938fd1498Szrj 
1321038fd1498Szrj       if (insn)
1321138fd1498Szrj 	rsp->last_set = insn;
1321238fd1498Szrj 
1321338fd1498Szrj       rsp->last_set_value = 0;
1321438fd1498Szrj       rsp->last_set_mode = VOIDmode;
1321538fd1498Szrj       rsp->last_set_nonzero_bits = 0;
1321638fd1498Szrj       rsp->last_set_sign_bit_copies = 0;
1321738fd1498Szrj       rsp->last_death = 0;
1321838fd1498Szrj       rsp->truncated_to_mode = VOIDmode;
1321938fd1498Szrj     }
1322038fd1498Szrj 
1322138fd1498Szrj   /* Mark registers that are being referenced in this value.  */
1322238fd1498Szrj   if (value)
1322338fd1498Szrj     update_table_tick (value);
1322438fd1498Szrj 
1322538fd1498Szrj   /* Now update the status of each register being set.
1322638fd1498Szrj      If someone is using this register in this block, set this register
1322738fd1498Szrj      to invalid since we will get confused between the two lives in this
1322838fd1498Szrj      basic block.  This makes using this register always invalid.  In cse, we
1322938fd1498Szrj      scan the table to invalidate all entries using this register, but this
1323038fd1498Szrj      is too much work for us.  */
1323138fd1498Szrj 
1323238fd1498Szrj   for (i = regno; i < endregno; i++)
1323338fd1498Szrj     {
1323438fd1498Szrj       rsp = &reg_stat[i];
1323538fd1498Szrj       rsp->last_set_label = label_tick;
1323638fd1498Szrj       if (!insn
1323738fd1498Szrj 	  || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
1323838fd1498Szrj 	rsp->last_set_invalid = 1;
1323938fd1498Szrj       else
1324038fd1498Szrj 	rsp->last_set_invalid = 0;
1324138fd1498Szrj     }
1324238fd1498Szrj 
1324338fd1498Szrj   /* The value being assigned might refer to X (like in "x++;").  In that
1324438fd1498Szrj      case, we must replace it with (clobber (const_int 0)) to prevent
1324538fd1498Szrj      infinite loops.  */
1324638fd1498Szrj   rsp = &reg_stat[regno];
1324738fd1498Szrj   if (value && !get_last_value_validate (&value, insn, label_tick, 0))
1324838fd1498Szrj     {
1324938fd1498Szrj       value = copy_rtx (value);
1325038fd1498Szrj       if (!get_last_value_validate (&value, insn, label_tick, 1))
1325138fd1498Szrj 	value = 0;
1325238fd1498Szrj     }
1325338fd1498Szrj 
1325438fd1498Szrj   /* For the main register being modified, update the value, the mode, the
1325538fd1498Szrj      nonzero bits, and the number of sign bit copies.  */
1325638fd1498Szrj 
1325738fd1498Szrj   rsp->last_set_value = value;
1325838fd1498Szrj 
1325938fd1498Szrj   if (value)
1326038fd1498Szrj     {
1326138fd1498Szrj       machine_mode mode = GET_MODE (reg);
1326238fd1498Szrj       subst_low_luid = DF_INSN_LUID (insn);
1326338fd1498Szrj       rsp->last_set_mode = mode;
1326438fd1498Szrj       if (GET_MODE_CLASS (mode) == MODE_INT
1326538fd1498Szrj 	  && HWI_COMPUTABLE_MODE_P (mode))
1326638fd1498Szrj 	mode = nonzero_bits_mode;
1326738fd1498Szrj       rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
1326838fd1498Szrj       rsp->last_set_sign_bit_copies
1326938fd1498Szrj 	= num_sign_bit_copies (value, GET_MODE (reg));
1327038fd1498Szrj     }
1327138fd1498Szrj }
1327238fd1498Szrj 
1327338fd1498Szrj /* Called via note_stores from record_dead_and_set_regs to handle one
1327438fd1498Szrj    SET or CLOBBER in an insn.  DATA is the instruction in which the
1327538fd1498Szrj    set is occurring.  */
1327638fd1498Szrj 
1327738fd1498Szrj static void
1327838fd1498Szrj record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
1327938fd1498Szrj {
1328038fd1498Szrj   rtx_insn *record_dead_insn = (rtx_insn *) data;
1328138fd1498Szrj 
1328238fd1498Szrj   if (GET_CODE (dest) == SUBREG)
1328338fd1498Szrj     dest = SUBREG_REG (dest);
1328438fd1498Szrj 
1328538fd1498Szrj   if (!record_dead_insn)
1328638fd1498Szrj     {
1328738fd1498Szrj       if (REG_P (dest))
1328838fd1498Szrj 	record_value_for_reg (dest, NULL, NULL_RTX);
1328938fd1498Szrj       return;
1329038fd1498Szrj     }
1329138fd1498Szrj 
1329238fd1498Szrj   if (REG_P (dest))
1329338fd1498Szrj     {
1329438fd1498Szrj       /* If we are setting the whole register, we know its value.  Otherwise
1329538fd1498Szrj 	 show that we don't know the value.  We can handle a SUBREG if it's
1329638fd1498Szrj 	 the low part, but we must be careful with paradoxical SUBREGs on
1329738fd1498Szrj 	 RISC architectures because we cannot strip e.g. an extension around
1329838fd1498Szrj 	 a load and record the naked load since the RTL middle-end considers
1329938fd1498Szrj 	 that the upper bits are defined according to LOAD_EXTEND_OP.  */
1330038fd1498Szrj       if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
1330138fd1498Szrj 	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
1330238fd1498Szrj       else if (GET_CODE (setter) == SET
1330338fd1498Szrj 	       && GET_CODE (SET_DEST (setter)) == SUBREG
1330438fd1498Szrj 	       && SUBREG_REG (SET_DEST (setter)) == dest
1330538fd1498Szrj 	       && known_le (GET_MODE_PRECISION (GET_MODE (dest)),
1330638fd1498Szrj 			    BITS_PER_WORD)
1330738fd1498Szrj 	       && subreg_lowpart_p (SET_DEST (setter)))
1330838fd1498Szrj 	record_value_for_reg (dest, record_dead_insn,
1330938fd1498Szrj 			      WORD_REGISTER_OPERATIONS
13310*58e805e6Szrj 			      && word_register_operation_p (SET_SRC (setter))
1331138fd1498Szrj 			      && paradoxical_subreg_p (SET_DEST (setter))
1331238fd1498Szrj 			      ? SET_SRC (setter)
1331338fd1498Szrj 			      : gen_lowpart (GET_MODE (dest),
1331438fd1498Szrj 					     SET_SRC (setter)));
1331538fd1498Szrj       else
1331638fd1498Szrj 	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
1331738fd1498Szrj     }
1331838fd1498Szrj   else if (MEM_P (dest)
1331938fd1498Szrj 	   /* Ignore pushes, they clobber nothing.  */
1332038fd1498Szrj 	   && ! push_operand (dest, GET_MODE (dest)))
1332138fd1498Szrj     mem_last_set = DF_INSN_LUID (record_dead_insn);
1332238fd1498Szrj }
1332338fd1498Szrj 
1332438fd1498Szrj /* Update the records of when each REG was most recently set or killed
1332538fd1498Szrj    for the things done by INSN.  This is the last thing done in processing
1332638fd1498Szrj    INSN in the combiner loop.
1332738fd1498Szrj 
1332838fd1498Szrj    We update reg_stat[], in particular fields last_set, last_set_value,
1332938fd1498Szrj    last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
1333038fd1498Szrj    last_death, and also the similar information mem_last_set (which insn
1333138fd1498Szrj    most recently modified memory) and last_call_luid (which insn was the
1333238fd1498Szrj    most recent subroutine call).  */
1333338fd1498Szrj 
1333438fd1498Szrj static void
1333538fd1498Szrj record_dead_and_set_regs (rtx_insn *insn)
1333638fd1498Szrj {
1333738fd1498Szrj   rtx link;
1333838fd1498Szrj   unsigned int i;
1333938fd1498Szrj 
1334038fd1498Szrj   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1334138fd1498Szrj     {
1334238fd1498Szrj       if (REG_NOTE_KIND (link) == REG_DEAD
1334338fd1498Szrj 	  && REG_P (XEXP (link, 0)))
1334438fd1498Szrj 	{
1334538fd1498Szrj 	  unsigned int regno = REGNO (XEXP (link, 0));
1334638fd1498Szrj 	  unsigned int endregno = END_REGNO (XEXP (link, 0));
1334738fd1498Szrj 
1334838fd1498Szrj 	  for (i = regno; i < endregno; i++)
1334938fd1498Szrj 	    {
1335038fd1498Szrj 	      reg_stat_type *rsp;
1335138fd1498Szrj 
1335238fd1498Szrj 	      rsp = &reg_stat[i];
1335338fd1498Szrj 	      rsp->last_death = insn;
1335438fd1498Szrj 	    }
1335538fd1498Szrj 	}
1335638fd1498Szrj       else if (REG_NOTE_KIND (link) == REG_INC)
1335738fd1498Szrj 	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
1335838fd1498Szrj     }
1335938fd1498Szrj 
1336038fd1498Szrj   if (CALL_P (insn))
1336138fd1498Szrj     {
1336238fd1498Szrj       hard_reg_set_iterator hrsi;
1336338fd1498Szrj       EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, i, hrsi)
1336438fd1498Szrj 	{
1336538fd1498Szrj 	  reg_stat_type *rsp;
1336638fd1498Szrj 
1336738fd1498Szrj 	  rsp = &reg_stat[i];
1336838fd1498Szrj 	  rsp->last_set_invalid = 1;
1336938fd1498Szrj 	  rsp->last_set = insn;
1337038fd1498Szrj 	  rsp->last_set_value = 0;
1337138fd1498Szrj 	  rsp->last_set_mode = VOIDmode;
1337238fd1498Szrj 	  rsp->last_set_nonzero_bits = 0;
1337338fd1498Szrj 	  rsp->last_set_sign_bit_copies = 0;
1337438fd1498Szrj 	  rsp->last_death = 0;
1337538fd1498Szrj 	  rsp->truncated_to_mode = VOIDmode;
1337638fd1498Szrj 	}
1337738fd1498Szrj 
1337838fd1498Szrj       last_call_luid = mem_last_set = DF_INSN_LUID (insn);
1337938fd1498Szrj 
1338038fd1498Szrj       /* We can't combine into a call pattern.  Remember, though, that
1338138fd1498Szrj 	 the return value register is set at this LUID.  We could
1338238fd1498Szrj 	 still replace a register with the return value from the
1338338fd1498Szrj 	 wrong subroutine call!  */
1338438fd1498Szrj       note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
1338538fd1498Szrj     }
1338638fd1498Szrj   else
1338738fd1498Szrj     note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
1338838fd1498Szrj }
1338938fd1498Szrj 
1339038fd1498Szrj /* If a SUBREG has the promoted bit set, it is in fact a property of the
1339138fd1498Szrj    register present in the SUBREG, so for each such SUBREG go back and
1339238fd1498Szrj    adjust nonzero and sign bit information of the registers that are
1339338fd1498Szrj    known to have some zero/sign bits set.
1339438fd1498Szrj 
1339538fd1498Szrj    This is needed because when combine blows the SUBREGs away, the
1339638fd1498Szrj    information on zero/sign bits is lost and further combines can be
1339738fd1498Szrj    missed because of that.  */
1339838fd1498Szrj 
1339938fd1498Szrj static void
1340038fd1498Szrj record_promoted_value (rtx_insn *insn, rtx subreg)
1340138fd1498Szrj {
1340238fd1498Szrj   struct insn_link *links;
1340338fd1498Szrj   rtx set;
1340438fd1498Szrj   unsigned int regno = REGNO (SUBREG_REG (subreg));
1340538fd1498Szrj   machine_mode mode = GET_MODE (subreg);
1340638fd1498Szrj 
1340738fd1498Szrj   if (!HWI_COMPUTABLE_MODE_P (mode))
1340838fd1498Szrj     return;
1340938fd1498Szrj 
1341038fd1498Szrj   for (links = LOG_LINKS (insn); links;)
1341138fd1498Szrj     {
1341238fd1498Szrj       reg_stat_type *rsp;
1341338fd1498Szrj 
1341438fd1498Szrj       insn = links->insn;
1341538fd1498Szrj       set = single_set (insn);
1341638fd1498Szrj 
1341738fd1498Szrj       if (! set || !REG_P (SET_DEST (set))
1341838fd1498Szrj 	  || REGNO (SET_DEST (set)) != regno
1341938fd1498Szrj 	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
1342038fd1498Szrj 	{
1342138fd1498Szrj 	  links = links->next;
1342238fd1498Szrj 	  continue;
1342338fd1498Szrj 	}
1342438fd1498Szrj 
1342538fd1498Szrj       rsp = &reg_stat[regno];
1342638fd1498Szrj       if (rsp->last_set == insn)
1342738fd1498Szrj 	{
1342838fd1498Szrj 	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
1342938fd1498Szrj 	    rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
1343038fd1498Szrj 	}
1343138fd1498Szrj 
1343238fd1498Szrj       if (REG_P (SET_SRC (set)))
1343338fd1498Szrj 	{
1343438fd1498Szrj 	  regno = REGNO (SET_SRC (set));
1343538fd1498Szrj 	  links = LOG_LINKS (insn);
1343638fd1498Szrj 	}
1343738fd1498Szrj       else
1343838fd1498Szrj 	break;
1343938fd1498Szrj     }
1344038fd1498Szrj }
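
/* Illustrative example (hedged; the exact modes are target-dependent):
   on a target that promotes SImode values to DImode registers, a
   promoted subreg such as

       (subreg:SI (reg:DI 100) 0)   with SUBREG_PROMOTED_UNSIGNED_P set

   guarantees that the bits above bit 31 of (reg:DI 100) are zero.  A
   hypothetical check built on what the loop above records:  */
#if 0
static bool
example_high_bits_known_zero (rtx subreg)
{
  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (GET_MODE (subreg));
  reg_stat_type *rsp = &reg_stat[REGNO (SUBREG_REG (subreg))];
  return (rsp->last_set_nonzero_bits & ~mask) == 0;
}
#endif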
1344138fd1498Szrj 
1344238fd1498Szrj /* Check if X, a register, is known to contain a value already
1344338fd1498Szrj    truncated to MODE.  In this case we can use a subreg to refer to
1344438fd1498Szrj    the truncated value even though in the generic case we would need
1344538fd1498Szrj    an explicit truncation.  */
1344638fd1498Szrj 
1344738fd1498Szrj static bool
1344838fd1498Szrj reg_truncated_to_mode (machine_mode mode, const_rtx x)
1344938fd1498Szrj {
1345038fd1498Szrj   reg_stat_type *rsp = &reg_stat[REGNO (x)];
1345138fd1498Szrj   machine_mode truncated = rsp->truncated_to_mode;
1345238fd1498Szrj 
1345338fd1498Szrj   if (truncated == 0
1345438fd1498Szrj       || rsp->truncation_label < label_tick_ebb_start)
1345538fd1498Szrj     return false;
1345638fd1498Szrj   if (!partial_subreg_p (mode, truncated))
1345738fd1498Szrj     return true;
1345838fd1498Szrj   if (TRULY_NOOP_TRUNCATION_MODES_P (mode, truncated))
1345938fd1498Szrj     return true;
1346038fd1498Szrj   return false;
1346138fd1498Szrj }
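
/* A minimal usage sketch (illustrative only): if X is an SImode
   register already known truncated to QImode, combine may rewrite

       (truncate:QI (reg:SI 100))   as   (subreg:QI (reg:SI 100) 0)

   (lowpart offset shown for a little-endian target).  */
#if 0
static rtx
example_truncate_to_subreg (rtx x)
{
  if (REG_P (x) && reg_truncated_to_mode (QImode, x))
    return gen_lowpart (QImode, x);	/* No explicit TRUNCATE needed.  */
  return NULL_RTX;
}
#endif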
1346238fd1498Szrj 
1346338fd1498Szrj /* If X is a hard reg or a subreg record the mode that the register is
1346438fd1498Szrj    accessed in.  For non-TARGET_TRULY_NOOP_TRUNCATION targets we might be
1346538fd1498Szrj    able to turn a truncate into a subreg using this information.  Return true
1346638fd1498Szrj    if traversing X is complete.  */
1346738fd1498Szrj 
1346838fd1498Szrj static bool
1346938fd1498Szrj record_truncated_value (rtx x)
1347038fd1498Szrj {
1347138fd1498Szrj   machine_mode truncated_mode;
1347238fd1498Szrj   reg_stat_type *rsp;
1347338fd1498Szrj 
1347438fd1498Szrj   if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
1347538fd1498Szrj     {
1347638fd1498Szrj       machine_mode original_mode = GET_MODE (SUBREG_REG (x));
1347738fd1498Szrj       truncated_mode = GET_MODE (x);
1347838fd1498Szrj 
1347938fd1498Szrj       if (!partial_subreg_p (truncated_mode, original_mode))
1348038fd1498Szrj 	return true;
1348138fd1498Szrj 
1348338fd1498Szrj       if (TRULY_NOOP_TRUNCATION_MODES_P (truncated_mode, original_mode))
1348438fd1498Szrj 	return true;
1348538fd1498Szrj 
1348638fd1498Szrj       x = SUBREG_REG (x);
1348738fd1498Szrj     }
1348838fd1498Szrj   /* ??? For hard-regs we now record everything.  We might be able to
1348938fd1498Szrj      optimize this using last_set_mode.  */
1349038fd1498Szrj   else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1349138fd1498Szrj     truncated_mode = GET_MODE (x);
1349238fd1498Szrj   else
1349338fd1498Szrj     return false;
1349438fd1498Szrj 
1349538fd1498Szrj   rsp = &reg_stat[REGNO (x)];
1349638fd1498Szrj   if (rsp->truncated_to_mode == 0
1349738fd1498Szrj       || rsp->truncation_label < label_tick_ebb_start
1349838fd1498Szrj       || partial_subreg_p (truncated_mode, rsp->truncated_to_mode))
1349938fd1498Szrj     {
1350038fd1498Szrj       rsp->truncated_to_mode = truncated_mode;
1350138fd1498Szrj       rsp->truncation_label = label_tick;
1350238fd1498Szrj     }
1350338fd1498Szrj 
1350438fd1498Szrj   return true;
1350538fd1498Szrj }
1350638fd1498Szrj 
1350738fd1498Szrj /* Callback for note_uses.  Find hardregs and subregs of pseudos and
1350838fd1498Szrj    the modes they are used in.  This can help turn TRUNCATEs into
1350938fd1498Szrj    SUBREGs.  */
1351038fd1498Szrj 
1351138fd1498Szrj static void
1351238fd1498Szrj record_truncated_values (rtx *loc, void *data ATTRIBUTE_UNUSED)
1351338fd1498Szrj {
1351438fd1498Szrj   subrtx_var_iterator::array_type array;
1351538fd1498Szrj   FOR_EACH_SUBRTX_VAR (iter, array, *loc, NONCONST)
1351638fd1498Szrj     if (record_truncated_value (*iter))
1351738fd1498Szrj       iter.skip_subrtxes ();
1351838fd1498Szrj }
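
/* Usage sketch (illustrative; the real call site lives elsewhere in
   this file, in record_dead_and_set_regs): the callback is wired up
   through note_uses so every use in an insn's pattern gets scanned.  */
#if 0
static void
example_scan_insn_for_truncations (rtx_insn *insn)
{
  note_uses (&PATTERN (insn), record_truncated_values, NULL);
}
#endif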
1351938fd1498Szrj 
1352038fd1498Szrj /* Scan X for promoted SUBREGs.  For each one found,
1352138fd1498Szrj    note what it implies to the registers used in it.  */
1352238fd1498Szrj 
1352338fd1498Szrj static void
1352438fd1498Szrj check_promoted_subreg (rtx_insn *insn, rtx x)
1352538fd1498Szrj {
1352638fd1498Szrj   if (GET_CODE (x) == SUBREG
1352738fd1498Szrj       && SUBREG_PROMOTED_VAR_P (x)
1352838fd1498Szrj       && REG_P (SUBREG_REG (x)))
1352938fd1498Szrj     record_promoted_value (insn, x);
1353038fd1498Szrj   else
1353138fd1498Szrj     {
1353238fd1498Szrj       const char *format = GET_RTX_FORMAT (GET_CODE (x));
1353338fd1498Szrj       int i, j;
1353438fd1498Szrj 
1353538fd1498Szrj       for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
1353638fd1498Szrj 	switch (format[i])
1353738fd1498Szrj 	  {
1353838fd1498Szrj 	  case 'e':
1353938fd1498Szrj 	    check_promoted_subreg (insn, XEXP (x, i));
1354038fd1498Szrj 	    break;
1354138fd1498Szrj 	  case 'V':
1354238fd1498Szrj 	  case 'E':
1354338fd1498Szrj 	    if (XVEC (x, i) != 0)
1354438fd1498Szrj 	      for (j = 0; j < XVECLEN (x, i); j++)
1354538fd1498Szrj 		check_promoted_subreg (insn, XVECEXP (x, i, j));
1354638fd1498Szrj 	    break;
1354738fd1498Szrj 	  }
1354838fd1498Szrj     }
1354938fd1498Szrj }
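
/* Usage sketch (illustrative): combine's driver invokes this, roughly
   as below, on each insn's whole pattern so that promoted SUBREGs found
   anywhere inside it refresh the nonzero/sign-bit data.  */
#if 0
static void
example_scan_for_promotions (rtx_insn *insn)
{
  check_promoted_subreg (insn, PATTERN (insn));
}
#endif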
1355038fd1498Szrj 
1355138fd1498Szrj /* Verify that all the registers and memory references mentioned in *LOC are
1355238fd1498Szrj    still valid.  *LOC was part of a value set in INSN when label_tick was
1355338fd1498Szrj    equal to TICK.  Return 0 if some are not.  If REPLACE is nonzero, replace
1355438fd1498Szrj    the invalid references with (clobber (const_int 0)) and return 1.  This
1355538fd1498Szrj    replacement is useful because we often can get useful information about
1355638fd1498Szrj    the form of a value (e.g., if it was produced by a shift that always
1355738fd1498Szrj    produces -1 or 0) even though we don't know exactly what registers it
1355838fd1498Szrj    was produced from.  */
1355938fd1498Szrj 
1356038fd1498Szrj static int
1356138fd1498Szrj get_last_value_validate (rtx *loc, rtx_insn *insn, int tick, int replace)
1356238fd1498Szrj {
1356338fd1498Szrj   rtx x = *loc;
1356438fd1498Szrj   const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
1356538fd1498Szrj   int len = GET_RTX_LENGTH (GET_CODE (x));
1356638fd1498Szrj   int i, j;
1356738fd1498Szrj 
1356838fd1498Szrj   if (REG_P (x))
1356938fd1498Szrj     {
1357038fd1498Szrj       unsigned int regno = REGNO (x);
1357138fd1498Szrj       unsigned int endregno = END_REGNO (x);
1357238fd1498Szrj       unsigned int j;
1357338fd1498Szrj 
1357438fd1498Szrj       for (j = regno; j < endregno; j++)
1357538fd1498Szrj 	{
1357638fd1498Szrj 	  reg_stat_type *rsp = &reg_stat[j];
1357738fd1498Szrj 	  if (rsp->last_set_invalid
1357838fd1498Szrj 	      /* If this is a pseudo-register that was only set once and not
1357938fd1498Szrj 		 live at the beginning of the function, it is always valid.  */
1358038fd1498Szrj 	      || (! (regno >= FIRST_PSEUDO_REGISTER
1358138fd1498Szrj 		     && regno < reg_n_sets_max
1358238fd1498Szrj 		     && REG_N_SETS (regno) == 1
1358338fd1498Szrj 		     && (!REGNO_REG_SET_P
1358438fd1498Szrj 			 (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
1358538fd1498Szrj 			  regno)))
1358638fd1498Szrj 		  && rsp->last_set_label > tick))
1358738fd1498Szrj 	  {
1358838fd1498Szrj 	    if (replace)
1358938fd1498Szrj 	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
1359038fd1498Szrj 	    return replace;
1359138fd1498Szrj 	  }
1359238fd1498Szrj 	}
1359338fd1498Szrj 
1359438fd1498Szrj       return 1;
1359538fd1498Szrj     }
1359638fd1498Szrj   /* If this is a memory reference, make sure that there were no stores after
1359738fd1498Szrj      it that might have clobbered the value.  We don't have alias info, so we
1359838fd1498Szrj      assume any store invalidates it.  Moreover, we only have local UIDs, so
1359938fd1498Szrj      we also assume that there were stores in the intervening basic blocks.  */
1360038fd1498Szrj   else if (MEM_P (x) && !MEM_READONLY_P (x)
1360138fd1498Szrj 	   && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
1360238fd1498Szrj     {
1360338fd1498Szrj       if (replace)
1360438fd1498Szrj 	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
1360538fd1498Szrj       return replace;
1360638fd1498Szrj     }
1360738fd1498Szrj 
1360838fd1498Szrj   for (i = 0; i < len; i++)
1360938fd1498Szrj     {
1361038fd1498Szrj       if (fmt[i] == 'e')
1361138fd1498Szrj 	{
1361238fd1498Szrj 	  /* Check for identical subexpressions.  If x contains
1361338fd1498Szrj 	     identical subexpression we only have to traverse one of
1361438fd1498Szrj 	     them.  */
1361538fd1498Szrj 	  if (i == 1 && ARITHMETIC_P (x))
1361638fd1498Szrj 	    {
1361738fd1498Szrj 	      /* Note that at this point x0 has already been checked
1361838fd1498Szrj 		 and found valid.  */
1361938fd1498Szrj 	      rtx x0 = XEXP (x, 0);
1362038fd1498Szrj 	      rtx x1 = XEXP (x, 1);
1362138fd1498Szrj 
1362238fd1498Szrj 	      /* If x0 and x1 are identical then x is also valid.  */
1362338fd1498Szrj 	      if (x0 == x1)
1362438fd1498Szrj 		return 1;
1362538fd1498Szrj 
1362638fd1498Szrj 	      /* If x1 is identical to a subexpression of x0 then
1362738fd1498Szrj 		 while checking x0, x1 has already been checked.  Thus
1362838fd1498Szrj 		 it is valid and so is x.  */
1362938fd1498Szrj 	      if (ARITHMETIC_P (x0)
1363038fd1498Szrj 		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
1363138fd1498Szrj 		return 1;
1363238fd1498Szrj 
1363338fd1498Szrj 	      /* If x0 is identical to a subexpression of x1 then x is
1363438fd1498Szrj 		 valid iff the rest of x1 is valid.  */
1363538fd1498Szrj 	      if (ARITHMETIC_P (x1)
1363638fd1498Szrj 		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
1363738fd1498Szrj 		return
1363838fd1498Szrj 		  get_last_value_validate (&XEXP (x1,
1363938fd1498Szrj 						  x0 == XEXP (x1, 0) ? 1 : 0),
1364038fd1498Szrj 					   insn, tick, replace);
1364138fd1498Szrj 	    }
1364238fd1498Szrj 
1364338fd1498Szrj 	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
1364438fd1498Szrj 				       replace) == 0)
1364538fd1498Szrj 	    return 0;
1364638fd1498Szrj 	}
1364738fd1498Szrj       else if (fmt[i] == 'E')
1364838fd1498Szrj 	for (j = 0; j < XVECLEN (x, i); j++)
1364938fd1498Szrj 	  if (get_last_value_validate (&XVECEXP (x, i, j),
1365038fd1498Szrj 				       insn, tick, replace) == 0)
1365138fd1498Szrj 	    return 0;
1365238fd1498Szrj     }
1365338fd1498Szrj 
1365438fd1498Szrj   /* If we haven't found a reason for it to be invalid, it is valid.  */
1365538fd1498Szrj   return 1;
1365638fd1498Szrj }
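
/* Illustrative sketch: with REPLACE nonzero, an invalidated register
   inside a cached value is overwritten in place, e.g. (reg:SI 101)
   becomes (clobber:SI (const_int 0)), so the surviving structure
   (shifts, masks, ...) still supports nonzero-bits reasoning.  This is
   the same replacement the function above performs.  */
#if 0
static void
example_mark_invalid (rtx *loc)
{
  *loc = gen_rtx_CLOBBER (GET_MODE (*loc), const0_rtx);
}
#endif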
1365738fd1498Szrj 
1365838fd1498Szrj /* Get the last value assigned to X, if known.  Some registers
1365938fd1498Szrj    in the value may be replaced with (clobber (const_int 0)) if their value
1366038fd1498Szrj    is no longer known reliably.  */
1366138fd1498Szrj 
1366238fd1498Szrj static rtx
1366338fd1498Szrj get_last_value (const_rtx x)
1366438fd1498Szrj {
1366538fd1498Szrj   unsigned int regno;
1366638fd1498Szrj   rtx value;
1366738fd1498Szrj   reg_stat_type *rsp;
1366838fd1498Szrj 
1366938fd1498Szrj   /* If this is a non-paradoxical SUBREG, get the value of its operand and
1367038fd1498Szrj      then convert it to the desired mode.  If this is a paradoxical SUBREG,
1367138fd1498Szrj      we cannot predict what values the "extra" bits might have.  */
1367238fd1498Szrj   if (GET_CODE (x) == SUBREG
1367338fd1498Szrj       && subreg_lowpart_p (x)
1367438fd1498Szrj       && !paradoxical_subreg_p (x)
1367538fd1498Szrj       && (value = get_last_value (SUBREG_REG (x))) != 0)
1367638fd1498Szrj     return gen_lowpart (GET_MODE (x), value);
1367738fd1498Szrj 
1367838fd1498Szrj   if (!REG_P (x))
1367938fd1498Szrj     return 0;
1368038fd1498Szrj 
1368138fd1498Szrj   regno = REGNO (x);
1368238fd1498Szrj   rsp = &reg_stat[regno];
1368338fd1498Szrj   value = rsp->last_set_value;
1368438fd1498Szrj 
1368538fd1498Szrj   /* If we don't have a value, or if it isn't for this basic block and
1368638fd1498Szrj      it's either a hard register, set more than once, or it's live
1368738fd1498Szrj      at the beginning of the function, return 0.
1368838fd1498Szrj 
1368938fd1498Szrj      Because if it's not live at the beginning of the function then the reg
1369038fd1498Szrj      is always set before being used (is never used without being set).
1369138fd1498Szrj      And, if it's set only once, and it's always set before use, then all
1369238fd1498Szrj      uses must have the same last value, even if it's not from this basic
1369338fd1498Szrj      block.  */
1369438fd1498Szrj 
1369538fd1498Szrj   if (value == 0
1369638fd1498Szrj       || (rsp->last_set_label < label_tick_ebb_start
1369738fd1498Szrj 	  && (regno < FIRST_PSEUDO_REGISTER
1369838fd1498Szrj 	      || regno >= reg_n_sets_max
1369938fd1498Szrj 	      || REG_N_SETS (regno) != 1
1370038fd1498Szrj 	      || REGNO_REG_SET_P
1370138fd1498Szrj 		 (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), regno))))
1370238fd1498Szrj     return 0;
1370338fd1498Szrj 
1370438fd1498Szrj   /* If the value was set in a later insn than the ones we are processing,
1370538fd1498Szrj      we can't use it even if the register was only set once.  */
1370638fd1498Szrj   if (rsp->last_set_label == label_tick
1370738fd1498Szrj       && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
1370838fd1498Szrj     return 0;
1370938fd1498Szrj 
1371038fd1498Szrj   /* If fewer bits were set than what we are asked for now, we cannot use
1371138fd1498Szrj      the value.  */
1371238fd1498Szrj   if (maybe_lt (GET_MODE_PRECISION (rsp->last_set_mode),
1371338fd1498Szrj 		GET_MODE_PRECISION (GET_MODE (x))))
1371438fd1498Szrj     return 0;
1371538fd1498Szrj 
1371638fd1498Szrj   /* If the value has all its registers valid, return it.  */
1371738fd1498Szrj   if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 0))
1371838fd1498Szrj     return value;
1371938fd1498Szrj 
1372038fd1498Szrj   /* Otherwise, make a copy and replace any invalid register with
1372138fd1498Szrj      (clobber (const_int 0)).  If that fails for some reason, return 0.  */
1372238fd1498Szrj 
1372338fd1498Szrj   value = copy_rtx (value);
1372438fd1498Szrj   if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 1))
1372538fd1498Szrj     return value;
1372638fd1498Szrj 
1372738fd1498Szrj   return 0;
1372838fd1498Szrj }
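
/* A minimal usage sketch (illustrative): callers must cope with both a
   null result and embedded (clobber (const_int 0)) markers in what is
   returned.  */
#if 0
static bool
example_value_is_constant (rtx reg)
{
  rtx value = get_last_value (reg);
  return value != 0 && CONST_INT_P (value);
}
#endif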
1372938fd1498Szrj 
1373038fd1498Szrj /* Define three variables used for communication between the following
1373138fd1498Szrj    routines.  */
1373238fd1498Szrj 
1373338fd1498Szrj static unsigned int reg_dead_regno, reg_dead_endregno;
1373438fd1498Szrj static int reg_dead_flag;
1373538fd1498Szrj 
1373638fd1498Szrj /* Function called via note_stores from reg_dead_at_p.
1373738fd1498Szrj 
1373838fd1498Szrj    If DEST is within [reg_dead_regno, reg_dead_endregno), set
1373938fd1498Szrj    reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
1374038fd1498Szrj 
1374138fd1498Szrj static void
1374238fd1498Szrj reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
1374338fd1498Szrj {
1374438fd1498Szrj   unsigned int regno, endregno;
1374538fd1498Szrj 
1374638fd1498Szrj   if (!REG_P (dest))
1374738fd1498Szrj     return;
1374838fd1498Szrj 
1374938fd1498Szrj   regno = REGNO (dest);
1375038fd1498Szrj   endregno = END_REGNO (dest);
1375138fd1498Szrj   if (reg_dead_endregno > regno && reg_dead_regno < endregno)
1375238fd1498Szrj     reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
1375338fd1498Szrj }
1375438fd1498Szrj 
1375538fd1498Szrj /* Return nonzero if REG is known to be dead at INSN.
1375638fd1498Szrj 
1375738fd1498Szrj    We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
1375838fd1498Szrj    referencing REG, it is dead.  If we hit a SET referencing REG, it is
1375938fd1498Szrj    live.  Otherwise, see if it is live or dead at the start of the basic
1376038fd1498Szrj    block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
1376138fd1498Szrj    must be assumed to be always live.  */
1376238fd1498Szrj 
1376338fd1498Szrj static int
1376438fd1498Szrj reg_dead_at_p (rtx reg, rtx_insn *insn)
1376538fd1498Szrj {
1376638fd1498Szrj   basic_block block;
1376738fd1498Szrj   unsigned int i;
1376838fd1498Szrj 
1376938fd1498Szrj   /* Set variables for reg_dead_at_p_1.  */
1377038fd1498Szrj   reg_dead_regno = REGNO (reg);
1377138fd1498Szrj   reg_dead_endregno = END_REGNO (reg);
1377238fd1498Szrj 
1377338fd1498Szrj   reg_dead_flag = 0;
1377438fd1498Szrj 
1377538fd1498Szrj   /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
1377638fd1498Szrj      we allow the machine description to decide whether use-and-clobber
1377738fd1498Szrj      patterns are OK.  */
1377838fd1498Szrj   if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
1377938fd1498Szrj     {
1378038fd1498Szrj       for (i = reg_dead_regno; i < reg_dead_endregno; i++)
1378138fd1498Szrj 	if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
1378238fd1498Szrj 	  return 0;
1378338fd1498Szrj     }
1378438fd1498Szrj 
1378538fd1498Szrj   /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
1378638fd1498Szrj      beginning of basic block.  */
1378738fd1498Szrj   block = BLOCK_FOR_INSN (insn);
1378838fd1498Szrj   for (;;)
1378938fd1498Szrj     {
1379038fd1498Szrj       if (INSN_P (insn))
1379138fd1498Szrj         {
1379238fd1498Szrj 	  if (find_regno_note (insn, REG_UNUSED, reg_dead_regno))
1379338fd1498Szrj 	    return 1;
1379438fd1498Szrj 
1379538fd1498Szrj 	  note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
1379638fd1498Szrj 	  if (reg_dead_flag)
1379738fd1498Szrj 	    return reg_dead_flag == 1 ? 1 : 0;
1379838fd1498Szrj 
1379938fd1498Szrj 	  if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
1380038fd1498Szrj 	    return 1;
1380138fd1498Szrj         }
1380238fd1498Szrj 
1380338fd1498Szrj       if (insn == BB_HEAD (block))
1380438fd1498Szrj 	break;
1380538fd1498Szrj 
1380638fd1498Szrj       insn = PREV_INSN (insn);
1380738fd1498Szrj     }
1380838fd1498Szrj 
1380938fd1498Szrj   /* Look at live-in sets for the basic block that we were in.  */
1381038fd1498Szrj   for (i = reg_dead_regno; i < reg_dead_endregno; i++)
1381138fd1498Szrj     if (REGNO_REG_SET_P (df_get_live_in (block), i))
1381238fd1498Szrj       return 0;
1381338fd1498Szrj 
1381438fd1498Szrj   return 1;
1381538fd1498Szrj }
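
/* Usage sketch (illustrative): a combination that wants to add a
   clobber of a hard register to a new pattern may only do so when the
   register is provably dead at that point.  */
#if 0
static bool
example_can_clobber_hard_reg (rtx hard_reg, rtx_insn *insn)
{
  return (REGNO (hard_reg) < FIRST_PSEUDO_REGISTER
	  && reg_dead_at_p (hard_reg, insn));
}
#endif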
1381638fd1498Szrj 
1381738fd1498Szrj /* Note hard registers in X that are used.  */
1381838fd1498Szrj 
1381938fd1498Szrj static void
1382038fd1498Szrj mark_used_regs_combine (rtx x)
1382138fd1498Szrj {
1382238fd1498Szrj   RTX_CODE code = GET_CODE (x);
1382338fd1498Szrj   unsigned int regno;
1382438fd1498Szrj   int i;
1382538fd1498Szrj 
1382638fd1498Szrj   switch (code)
1382738fd1498Szrj     {
1382838fd1498Szrj     case LABEL_REF:
1382938fd1498Szrj     case SYMBOL_REF:
1383038fd1498Szrj     case CONST:
1383138fd1498Szrj     CASE_CONST_ANY:
1383238fd1498Szrj     case PC:
1383338fd1498Szrj     case ADDR_VEC:
1383438fd1498Szrj     case ADDR_DIFF_VEC:
1383538fd1498Szrj     case ASM_INPUT:
1383638fd1498Szrj     /* CC0 must die in the insn after it is set, so we don't need to take
1383738fd1498Szrj        special note of it here.  */
1383838fd1498Szrj     case CC0:
1383938fd1498Szrj       return;
1384038fd1498Szrj 
1384138fd1498Szrj     case CLOBBER:
1384238fd1498Szrj       /* If we are clobbering a MEM, mark any hard registers inside the
1384338fd1498Szrj 	 address as used.  */
1384438fd1498Szrj       if (MEM_P (XEXP (x, 0)))
1384538fd1498Szrj 	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
1384638fd1498Szrj       return;
1384738fd1498Szrj 
1384838fd1498Szrj     case REG:
1384938fd1498Szrj       regno = REGNO (x);
1385038fd1498Szrj       /* A hard reg in a wide mode may really be multiple registers.
1385138fd1498Szrj 	 If so, mark all of them just like the first.  */
1385238fd1498Szrj       if (regno < FIRST_PSEUDO_REGISTER)
1385338fd1498Szrj 	{
1385438fd1498Szrj 	  /* None of this applies to the stack, frame or arg pointers.  */
1385538fd1498Szrj 	  if (regno == STACK_POINTER_REGNUM
1385638fd1498Szrj 	      || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
1385738fd1498Szrj 		  && regno == HARD_FRAME_POINTER_REGNUM)
1385838fd1498Szrj 	      || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1385938fd1498Szrj 		  && regno == ARG_POINTER_REGNUM && fixed_regs[regno])
1386038fd1498Szrj 	      || regno == FRAME_POINTER_REGNUM)
1386138fd1498Szrj 	    return;
1386238fd1498Szrj 
1386338fd1498Szrj 	  add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
1386438fd1498Szrj 	}
1386538fd1498Szrj       return;
1386638fd1498Szrj 
1386738fd1498Szrj     case SET:
1386838fd1498Szrj       {
1386938fd1498Szrj 	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
1387038fd1498Szrj 	   the address.  */
1387138fd1498Szrj 	rtx testreg = SET_DEST (x);
1387238fd1498Szrj 
1387338fd1498Szrj 	while (GET_CODE (testreg) == SUBREG
1387438fd1498Szrj 	       || GET_CODE (testreg) == ZERO_EXTRACT
1387538fd1498Szrj 	       || GET_CODE (testreg) == STRICT_LOW_PART)
1387638fd1498Szrj 	  testreg = XEXP (testreg, 0);
1387738fd1498Szrj 
1387838fd1498Szrj 	if (MEM_P (testreg))
1387938fd1498Szrj 	  mark_used_regs_combine (XEXP (testreg, 0));
1388038fd1498Szrj 
1388138fd1498Szrj 	mark_used_regs_combine (SET_SRC (x));
1388238fd1498Szrj       }
1388338fd1498Szrj       return;
1388438fd1498Szrj 
1388538fd1498Szrj     default:
1388638fd1498Szrj       break;
1388738fd1498Szrj     }
1388838fd1498Szrj 
1388938fd1498Szrj   /* Recursively scan the operands of this expression.  */
1389038fd1498Szrj 
1389138fd1498Szrj   {
1389238fd1498Szrj     const char *fmt = GET_RTX_FORMAT (code);
1389338fd1498Szrj 
1389438fd1498Szrj     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1389538fd1498Szrj       {
1389638fd1498Szrj 	if (fmt[i] == 'e')
1389738fd1498Szrj 	  mark_used_regs_combine (XEXP (x, i));
1389838fd1498Szrj 	else if (fmt[i] == 'E')
1389938fd1498Szrj 	  {
1390038fd1498Szrj 	    int j;
1390138fd1498Szrj 
1390238fd1498Szrj 	    for (j = 0; j < XVECLEN (x, i); j++)
1390338fd1498Szrj 	      mark_used_regs_combine (XVECEXP (x, i, j));
1390438fd1498Szrj 	  }
1390538fd1498Szrj       }
1390638fd1498Szrj   }
1390738fd1498Szrj }
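
/* Usage sketch (illustrative, hypothetical wrapper): the caller first
   clears newpat_used_regs and then records every hard register the
   tentative pattern mentions, so reg_dead_at_p above can veto
   use-and-clobber conflicts.  */
#if 0
static void
example_collect_used_hard_regs (rtx newpat)
{
  CLEAR_HARD_REG_SET (newpat_used_regs);
  mark_used_regs_combine (newpat);
}
#endif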
1390838fd1498Szrj 
1390938fd1498Szrj /* Remove register number REGNO from the dead registers list of INSN.
1391038fd1498Szrj 
1391138fd1498Szrj    Return the note used to record the death, if there was one.  */
1391238fd1498Szrj 
1391338fd1498Szrj rtx
1391438fd1498Szrj remove_death (unsigned int regno, rtx_insn *insn)
1391538fd1498Szrj {
1391638fd1498Szrj   rtx note = find_regno_note (insn, REG_DEAD, regno);
1391738fd1498Szrj 
1391838fd1498Szrj   if (note)
1391938fd1498Szrj     remove_note (insn, note);
1392038fd1498Szrj 
1392138fd1498Szrj   return note;
1392238fd1498Szrj }
1392338fd1498Szrj 
1392438fd1498Szrj /* For each register (hardware or pseudo) used within expression X, if its
1392538fd1498Szrj    death is in an instruction with luid between FROM_LUID (inclusive) and
1392638fd1498Szrj    TO_INSN (exclusive), put a REG_DEAD note for that register in the
1392738fd1498Szrj    list headed by PNOTES.
1392838fd1498Szrj 
1392938fd1498Szrj    That said, don't move registers killed by maybe_kill_insn.
1393038fd1498Szrj 
1393138fd1498Szrj    This is done when X is being merged by combination into TO_INSN.  These
1393238fd1498Szrj    notes will then be distributed as needed.  */
1393338fd1498Szrj 
1393438fd1498Szrj static void
1393538fd1498Szrj move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx_insn *to_insn,
1393638fd1498Szrj 	     rtx *pnotes)
1393738fd1498Szrj {
1393838fd1498Szrj   const char *fmt;
1393938fd1498Szrj   int len, i;
1394038fd1498Szrj   enum rtx_code code = GET_CODE (x);
1394138fd1498Szrj 
1394238fd1498Szrj   if (code == REG)
1394338fd1498Szrj     {
1394438fd1498Szrj       unsigned int regno = REGNO (x);
1394538fd1498Szrj       rtx_insn *where_dead = reg_stat[regno].last_death;
1394638fd1498Szrj 
1394738fd1498Szrj       /* If we do not know where the register died, it may still die between
1394838fd1498Szrj 	 FROM_LUID and TO_INSN.  If so, find it.  This is PR83304.  */
1394938fd1498Szrj       if (!where_dead || DF_INSN_LUID (where_dead) >= DF_INSN_LUID (to_insn))
1395038fd1498Szrj 	{
1395138fd1498Szrj 	  rtx_insn *insn = prev_real_nondebug_insn (to_insn);
1395238fd1498Szrj 	  while (insn
1395338fd1498Szrj 		 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (to_insn)
1395438fd1498Szrj 		 && DF_INSN_LUID (insn) >= from_luid)
1395538fd1498Szrj 	    {
1395638fd1498Szrj 	      if (dead_or_set_regno_p (insn, regno))
1395738fd1498Szrj 		{
1395838fd1498Szrj 		  if (find_regno_note (insn, REG_DEAD, regno))
1395938fd1498Szrj 		    where_dead = insn;
1396038fd1498Szrj 		  break;
1396138fd1498Szrj 		}
1396238fd1498Szrj 
1396338fd1498Szrj 	      insn = prev_real_nondebug_insn (insn);
1396438fd1498Szrj 	    }
1396538fd1498Szrj 	}
1396638fd1498Szrj 
1396738fd1498Szrj       /* Don't move the register if it gets killed in between from and to.  */
1396838fd1498Szrj       if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
1396938fd1498Szrj 	  && ! reg_referenced_p (x, maybe_kill_insn))
1397038fd1498Szrj 	return;
1397138fd1498Szrj 
1397238fd1498Szrj       if (where_dead
1397338fd1498Szrj 	  && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
1397438fd1498Szrj 	  && DF_INSN_LUID (where_dead) >= from_luid
1397538fd1498Szrj 	  && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
1397638fd1498Szrj 	{
1397738fd1498Szrj 	  rtx note = remove_death (regno, where_dead);
1397838fd1498Szrj 
1397938fd1498Szrj 	  /* It is possible for the call above to return 0.  This can occur
1398038fd1498Szrj 	     when last_death points to I2 or I1 that we combined with.
1398138fd1498Szrj 	     In that case make a new note.
1398238fd1498Szrj 
1398338fd1498Szrj 	     We must also check for the case where X is a hard register
1398438fd1498Szrj 	     and NOTE is a death note for a range of hard registers
1398538fd1498Szrj 	     including X.  In that case, we must put REG_DEAD notes for
1398638fd1498Szrj 	     the remaining registers in place of NOTE.  */
1398738fd1498Szrj 
1398838fd1498Szrj 	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
1398938fd1498Szrj 	      && partial_subreg_p (GET_MODE (x), GET_MODE (XEXP (note, 0))))
1399038fd1498Szrj 	    {
1399138fd1498Szrj 	      unsigned int deadregno = REGNO (XEXP (note, 0));
1399238fd1498Szrj 	      unsigned int deadend = END_REGNO (XEXP (note, 0));
1399338fd1498Szrj 	      unsigned int ourend = END_REGNO (x);
1399438fd1498Szrj 	      unsigned int i;
1399538fd1498Szrj 
1399638fd1498Szrj 	      for (i = deadregno; i < deadend; i++)
1399738fd1498Szrj 		if (i < regno || i >= ourend)
1399838fd1498Szrj 		  add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
1399938fd1498Szrj 	    }
1400038fd1498Szrj 
1400138fd1498Szrj 	  /* If we didn't find any note, or if we found a REG_DEAD note that
1400238fd1498Szrj 	     covers only part of the given reg, and we have a multi-reg hard
1400338fd1498Szrj 	     register, then to be safe we must check for REG_DEAD notes
1400438fd1498Szrj 	     for each register other than the first.  They could have
1400538fd1498Szrj 	     their own REG_DEAD notes lying around.  */
1400638fd1498Szrj 	  else if ((note == 0
1400738fd1498Szrj 		    || (note != 0
1400838fd1498Szrj 			&& partial_subreg_p (GET_MODE (XEXP (note, 0)),
1400938fd1498Szrj 					     GET_MODE (x))))
1401038fd1498Szrj 		   && regno < FIRST_PSEUDO_REGISTER
1401138fd1498Szrj 		   && REG_NREGS (x) > 1)
1401238fd1498Szrj 	    {
1401338fd1498Szrj 	      unsigned int ourend = END_REGNO (x);
1401438fd1498Szrj 	      unsigned int i, offset;
1401538fd1498Szrj 	      rtx oldnotes = 0;
1401638fd1498Szrj 
1401738fd1498Szrj 	      if (note)
1401838fd1498Szrj 		offset = hard_regno_nregs (regno, GET_MODE (XEXP (note, 0)));
1401938fd1498Szrj 	      else
1402038fd1498Szrj 		offset = 1;
1402138fd1498Szrj 
1402238fd1498Szrj 	      for (i = regno + offset; i < ourend; i++)
1402338fd1498Szrj 		move_deaths (regno_reg_rtx[i],
1402438fd1498Szrj 			     maybe_kill_insn, from_luid, to_insn, &oldnotes);
1402538fd1498Szrj 	    }
1402638fd1498Szrj 
1402738fd1498Szrj 	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
1402838fd1498Szrj 	    {
1402938fd1498Szrj 	      XEXP (note, 1) = *pnotes;
1403038fd1498Szrj 	      *pnotes = note;
1403138fd1498Szrj 	    }
1403238fd1498Szrj 	  else
1403338fd1498Szrj 	    *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
1403438fd1498Szrj 	}
1403538fd1498Szrj 
1403638fd1498Szrj       return;
1403738fd1498Szrj     }
1403838fd1498Szrj 
1403938fd1498Szrj   else if (GET_CODE (x) == SET)
1404038fd1498Szrj     {
1404138fd1498Szrj       rtx dest = SET_DEST (x);
1404238fd1498Szrj 
1404338fd1498Szrj       move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);
1404438fd1498Szrj 
1404538fd1498Szrj       /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
1404638fd1498Szrj 	 that accesses one word of a multi-word item, some
1404738fd1498Szrj 	 piece of every register in the expression is used by
1404838fd1498Szrj 	 this insn, so remove any old death.  */
1404938fd1498Szrj       /* ??? So why do we test for equality of the sizes?  */
1405038fd1498Szrj 
1405138fd1498Szrj       if (GET_CODE (dest) == ZERO_EXTRACT
1405238fd1498Szrj 	  || GET_CODE (dest) == STRICT_LOW_PART
1405338fd1498Szrj 	  || (GET_CODE (dest) == SUBREG
1405438fd1498Szrj 	      && !read_modify_subreg_p (dest)))
1405538fd1498Szrj 	{
1405638fd1498Szrj 	  move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
1405738fd1498Szrj 	  return;
1405838fd1498Szrj 	}
1405938fd1498Szrj 
1406038fd1498Szrj       /* If this is some other SUBREG, we know it replaces the entire
1406138fd1498Szrj 	 value, so use that as the destination.  */
1406238fd1498Szrj       if (GET_CODE (dest) == SUBREG)
1406338fd1498Szrj 	dest = SUBREG_REG (dest);
1406438fd1498Szrj 
1406538fd1498Szrj       /* If this is a MEM, adjust deaths of anything used in the address.
1406638fd1498Szrj 	 For a REG (the only other possibility), the entire value is
1406738fd1498Szrj 	 being replaced so the old value is not used in this insn.  */
1406838fd1498Szrj 
1406938fd1498Szrj       if (MEM_P (dest))
1407038fd1498Szrj 	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
1407138fd1498Szrj 		     to_insn, pnotes);
1407238fd1498Szrj       return;
1407338fd1498Szrj     }
1407438fd1498Szrj 
1407538fd1498Szrj   else if (GET_CODE (x) == CLOBBER)
1407638fd1498Szrj     return;
1407738fd1498Szrj 
1407838fd1498Szrj   len = GET_RTX_LENGTH (code);
1407938fd1498Szrj   fmt = GET_RTX_FORMAT (code);
1408038fd1498Szrj 
1408138fd1498Szrj   for (i = 0; i < len; i++)
1408238fd1498Szrj     {
1408338fd1498Szrj       if (fmt[i] == 'E')
1408438fd1498Szrj 	{
1408538fd1498Szrj 	  int j;
1408638fd1498Szrj 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1408738fd1498Szrj 	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
1408838fd1498Szrj 			 to_insn, pnotes);
1408938fd1498Szrj 	}
1409038fd1498Szrj       else if (fmt[i] == 'e')
1409138fd1498Szrj 	move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
1409238fd1498Szrj     }
1409338fd1498Szrj }
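
/* A rough usage sketch (illustrative; simplified from how try_combine
   drives this): deaths inside PATTERN are detached from their old insns
   into NEW_NOTES, then handed to distribute_notes for re-placement.  */
#if 0
static void
example_collect_deaths (rtx pattern, rtx_insn *i2, rtx_insn *i3)
{
  rtx new_notes = NULL_RTX;
  move_deaths (pattern, NULL_RTX, DF_INSN_LUID (i2), i3, &new_notes);
  distribute_notes (new_notes, i3, i3, NULL, NULL_RTX, NULL_RTX, NULL_RTX);
}
#endif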
1409438fd1498Szrj 
1409538fd1498Szrj /* Return 1 if X is the target of a bit-field assignment in BODY, the
1409638fd1498Szrj    pattern of an insn.  X must be a REG.  */
1409738fd1498Szrj 
1409838fd1498Szrj static int
1409938fd1498Szrj reg_bitfield_target_p (rtx x, rtx body)
1410038fd1498Szrj {
1410138fd1498Szrj   int i;
1410238fd1498Szrj 
1410338fd1498Szrj   if (GET_CODE (body) == SET)
1410438fd1498Szrj     {
1410538fd1498Szrj       rtx dest = SET_DEST (body);
1410638fd1498Szrj       rtx target;
1410738fd1498Szrj       unsigned int regno, tregno, endregno, endtregno;
1410838fd1498Szrj 
1410938fd1498Szrj       if (GET_CODE (dest) == ZERO_EXTRACT)
1411038fd1498Szrj 	target = XEXP (dest, 0);
1411138fd1498Szrj       else if (GET_CODE (dest) == STRICT_LOW_PART)
1411238fd1498Szrj 	target = SUBREG_REG (XEXP (dest, 0));
1411338fd1498Szrj       else
1411438fd1498Szrj 	return 0;
1411538fd1498Szrj 
1411638fd1498Szrj       if (GET_CODE (target) == SUBREG)
1411738fd1498Szrj 	target = SUBREG_REG (target);
1411838fd1498Szrj 
1411938fd1498Szrj       if (!REG_P (target))
1412038fd1498Szrj 	return 0;
1412138fd1498Szrj 
1412238fd1498Szrj       tregno = REGNO (target), regno = REGNO (x);
1412338fd1498Szrj       if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
1412438fd1498Szrj 	return target == x;
1412538fd1498Szrj 
1412638fd1498Szrj       endtregno = end_hard_regno (GET_MODE (target), tregno);
1412738fd1498Szrj       endregno = end_hard_regno (GET_MODE (x), regno);
1412838fd1498Szrj 
1412938fd1498Szrj       return endregno > tregno && regno < endtregno;
1413038fd1498Szrj     }
1413138fd1498Szrj 
1413238fd1498Szrj   else if (GET_CODE (body) == PARALLEL)
1413338fd1498Szrj     for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1413438fd1498Szrj       if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
1413538fd1498Szrj 	return 1;
1413638fd1498Szrj 
1413738fd1498Szrj   return 0;
1413838fd1498Szrj }
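
/* Illustrative example: for a body like

       (set (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 0))
	    (reg:SI 101))

   this returns 1 for (reg:SI 100): only part of the register is
   written, so a REG_DEAD note for it must not simply be dropped.  A
   hypothetical wrapper:  */
#if 0
static bool
example_partial_write_p (rtx reg, rtx_insn *insn)
{
  return reg_bitfield_target_p (reg, PATTERN (insn)) != 0;
}
#endif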
1413938fd1498Szrj 
1414038fd1498Szrj /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
1414138fd1498Szrj    as appropriate.  I3 and I2 are the insns resulting from the combination
1414238fd1498Szrj    insns including FROM (I2 may be zero).
1414338fd1498Szrj 
1414438fd1498Szrj    ELIM_I2 and ELIM_I1 are either zero or registers that we know will
1414538fd1498Szrj    not need REG_DEAD notes because they are being substituted for.  This
1414638fd1498Szrj    saves searching in the most common cases.
1414738fd1498Szrj 
1414838fd1498Szrj    Each note in the list is either ignored or placed on some insns, depending
1414938fd1498Szrj    on the type of note.  */
1415038fd1498Szrj 
1415138fd1498Szrj static void
1415238fd1498Szrj distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
1415338fd1498Szrj 		  rtx elim_i2, rtx elim_i1, rtx elim_i0)
1415438fd1498Szrj {
1415538fd1498Szrj   rtx note, next_note;
1415638fd1498Szrj   rtx tem_note;
1415738fd1498Szrj   rtx_insn *tem_insn;
1415838fd1498Szrj 
1415938fd1498Szrj   for (note = notes; note; note = next_note)
1416038fd1498Szrj     {
1416138fd1498Szrj       rtx_insn *place = 0, *place2 = 0;
1416238fd1498Szrj 
1416338fd1498Szrj       next_note = XEXP (note, 1);
1416438fd1498Szrj       switch (REG_NOTE_KIND (note))
1416538fd1498Szrj 	{
1416638fd1498Szrj 	case REG_BR_PROB:
1416738fd1498Szrj 	case REG_BR_PRED:
1416838fd1498Szrj 	  /* Doesn't matter much where we put this, as long as it's somewhere.
1416938fd1498Szrj 	     It is preferable to keep these notes on branches, which is most
1417038fd1498Szrj 	     likely to be i3.  */
1417138fd1498Szrj 	  place = i3;
1417238fd1498Szrj 	  break;
1417338fd1498Szrj 
1417438fd1498Szrj 	case REG_NON_LOCAL_GOTO:
1417538fd1498Szrj 	  if (JUMP_P (i3))
1417638fd1498Szrj 	    place = i3;
1417738fd1498Szrj 	  else
1417838fd1498Szrj 	    {
1417938fd1498Szrj 	      gcc_assert (i2 && JUMP_P (i2));
1418038fd1498Szrj 	      place = i2;
1418138fd1498Szrj 	    }
1418238fd1498Szrj 	  break;
1418338fd1498Szrj 
1418438fd1498Szrj 	case REG_EH_REGION:
1418538fd1498Szrj 	  /* These notes must remain with the call or trapping instruction.  */
1418638fd1498Szrj 	  if (CALL_P (i3))
1418738fd1498Szrj 	    place = i3;
1418838fd1498Szrj 	  else if (i2 && CALL_P (i2))
1418938fd1498Szrj 	    place = i2;
1419038fd1498Szrj 	  else
1419138fd1498Szrj 	    {
1419238fd1498Szrj 	      gcc_assert (cfun->can_throw_non_call_exceptions);
1419338fd1498Szrj 	      if (may_trap_p (i3))
1419438fd1498Szrj 		place = i3;
1419538fd1498Szrj 	      else if (i2 && may_trap_p (i2))
1419638fd1498Szrj 		place = i2;
1419738fd1498Szrj 	      /* ??? Otherwise assume we've combined things such that we
1419838fd1498Szrj 		 can now prove that the instructions can't trap.  Drop the
1419938fd1498Szrj 		 note in this case.  */
1420038fd1498Szrj 	    }
1420138fd1498Szrj 	  break;
1420238fd1498Szrj 
1420338fd1498Szrj 	case REG_ARGS_SIZE:
1420438fd1498Szrj 	  /* ??? How to distribute between i3-i1.  Assume i3 contains the
1420538fd1498Szrj 	     entire adjustment.  Assert i3 contains at least some adjust.  */
1420638fd1498Szrj 	  if (!noop_move_p (i3))
1420738fd1498Szrj 	    {
1420838fd1498Szrj 	      poly_int64 old_size, args_size = get_args_size (note);
1420938fd1498Szrj 	      /* fixup_args_size_notes looks at REG_NORETURN note,
1421038fd1498Szrj 		 so ensure the note is placed there first.  */
1421138fd1498Szrj 	      if (CALL_P (i3))
1421238fd1498Szrj 		{
1421338fd1498Szrj 		  rtx *np;
1421438fd1498Szrj 		  for (np = &next_note; *np; np = &XEXP (*np, 1))
1421538fd1498Szrj 		    if (REG_NOTE_KIND (*np) == REG_NORETURN)
1421638fd1498Szrj 		      {
1421738fd1498Szrj 			rtx n = *np;
1421838fd1498Szrj 			*np = XEXP (n, 1);
1421938fd1498Szrj 			XEXP (n, 1) = REG_NOTES (i3);
1422038fd1498Szrj 			REG_NOTES (i3) = n;
1422138fd1498Szrj 			break;
1422238fd1498Szrj 		      }
1422338fd1498Szrj 		}
1422438fd1498Szrj 	      old_size = fixup_args_size_notes (PREV_INSN (i3), i3, args_size);
1422538fd1498Szrj 	      /* emit_call_1 adds for !ACCUMULATE_OUTGOING_ARGS
1422638fd1498Szrj 		 REG_ARGS_SIZE note to all noreturn calls, allow that here.  */
1422738fd1498Szrj 	      gcc_assert (maybe_ne (old_size, args_size)
1422838fd1498Szrj 			  || (CALL_P (i3)
1422938fd1498Szrj 			      && !ACCUMULATE_OUTGOING_ARGS
1423038fd1498Szrj 			      && find_reg_note (i3, REG_NORETURN, NULL_RTX)));
1423138fd1498Szrj 	    }
1423238fd1498Szrj 	  break;
1423338fd1498Szrj 
1423438fd1498Szrj 	case REG_NORETURN:
1423538fd1498Szrj 	case REG_SETJMP:
1423638fd1498Szrj 	case REG_TM:
1423738fd1498Szrj 	case REG_CALL_DECL:
1423838fd1498Szrj 	case REG_CALL_NOCF_CHECK:
1423938fd1498Szrj 	  /* These notes must remain with the call.  It should not be
1424038fd1498Szrj 	     possible for both I2 and I3 to be a call.  */
1424138fd1498Szrj 	  if (CALL_P (i3))
1424238fd1498Szrj 	    place = i3;
1424338fd1498Szrj 	  else
1424438fd1498Szrj 	    {
1424538fd1498Szrj 	      gcc_assert (i2 && CALL_P (i2));
1424638fd1498Szrj 	      place = i2;
1424738fd1498Szrj 	    }
1424838fd1498Szrj 	  break;
1424938fd1498Szrj 
1425038fd1498Szrj 	case REG_UNUSED:
1425138fd1498Szrj 	  /* Any clobbers for i3 may still exist, and so we must process
1425238fd1498Szrj 	     REG_UNUSED notes from that insn.
1425338fd1498Szrj 
1425438fd1498Szrj 	     Any clobbers from i2 or i1 can only exist if they were added by
1425538fd1498Szrj 	     recog_for_combine.  In that case, recog_for_combine created the
1425638fd1498Szrj 	     necessary REG_UNUSED notes.  Trying to keep any original
1425738fd1498Szrj 	     REG_UNUSED notes from these insns can cause incorrect output
1425838fd1498Szrj 	     if it is for the same register as the original i3 dest.
1425938fd1498Szrj 	     In that case, we will notice that the register is set in i3,
1426038fd1498Szrj 	     and then add a REG_UNUSED note for the destination of i3, which
1426138fd1498Szrj 	     is wrong.  However, it is possible to have REG_UNUSED notes from
1426238fd1498Szrj 	     i2 or i1 for register which were both used and clobbered, so
1426338fd1498Szrj 	     we keep notes from i2 or i1 if they will turn into REG_DEAD
1426438fd1498Szrj 	     notes.  */
1426538fd1498Szrj 
1426638fd1498Szrj 	  /* If this register is set or clobbered in I3, put the note there
1426738fd1498Szrj 	     unless there is one already.  */
1426838fd1498Szrj 	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
1426938fd1498Szrj 	    {
1427038fd1498Szrj 	      if (from_insn != i3)
1427138fd1498Szrj 		break;
1427238fd1498Szrj 
1427338fd1498Szrj 	      if (! (REG_P (XEXP (note, 0))
1427438fd1498Szrj 		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
1427538fd1498Szrj 		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
1427638fd1498Szrj 		place = i3;
1427738fd1498Szrj 	    }
1427838fd1498Szrj 	  /* Otherwise, if this register is used by I3, then this register
1427938fd1498Szrj 	     now dies here, so we must put a REG_DEAD note here unless there
1428038fd1498Szrj 	     is one already.  */
1428138fd1498Szrj 	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
1428238fd1498Szrj 		   && ! (REG_P (XEXP (note, 0))
1428338fd1498Szrj 			 ? find_regno_note (i3, REG_DEAD,
1428438fd1498Szrj 					    REGNO (XEXP (note, 0)))
1428538fd1498Szrj 			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
1428638fd1498Szrj 	    {
1428738fd1498Szrj 	      PUT_REG_NOTE_KIND (note, REG_DEAD);
1428838fd1498Szrj 	      place = i3;
1428938fd1498Szrj 	    }
1429038fd1498Szrj 
1429138fd1498Szrj 	  /* A SET or CLOBBER of the REG_UNUSED reg has been removed,
1429238fd1498Szrj 	     but we can't tell which at this point.  We must reset any
1429338fd1498Szrj 	     expectations we had about the value that was previously
1429438fd1498Szrj 	     stored in the reg.  ??? Ideally, we'd adjust REG_N_SETS
1429538fd1498Szrj 	     and, if appropriate, restore its previous value, but we
1429638fd1498Szrj 	     don't have enough information for that at this point.  */
1429738fd1498Szrj 	  else
1429838fd1498Szrj 	    {
1429938fd1498Szrj 	      record_value_for_reg (XEXP (note, 0), NULL, NULL_RTX);
1430038fd1498Szrj 
1430138fd1498Szrj 	      /* Otherwise, if this register is now referenced in i2
1430238fd1498Szrj 		 then the register used to be modified in one of the
1430338fd1498Szrj 		 original insns.  If it was i3 (say, in an unused
1430438fd1498Szrj 		 parallel), it's now completely gone, so the note can
1430538fd1498Szrj 		 be discarded.  But if it was modified in i2, i1 or i0
1430638fd1498Szrj 		 and we still reference it in i2, then we're
1430738fd1498Szrj 		 referencing the previous value, and since the
1430838fd1498Szrj 		 register was modified and REG_UNUSED, we know that
1430938fd1498Szrj 		 the previous value is now dead.  So, if we only
1431038fd1498Szrj 		 reference the register in i2, we change the note to
1431138fd1498Szrj 		 REG_DEAD, to reflect the previous value.  However, if
1431238fd1498Szrj 		 we're also setting or clobbering the register as
1431338fd1498Szrj 		 scratch, we know (because the register was not
1431438fd1498Szrj 		 referenced in i3) that it's unused, just as it was
1431538fd1498Szrj 		 unused before, and we place the note in i2.  */
1431638fd1498Szrj 	      if (from_insn != i3 && i2 && INSN_P (i2)
1431738fd1498Szrj 		  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
1431838fd1498Szrj 		{
1431938fd1498Szrj 		  if (!reg_set_p (XEXP (note, 0), PATTERN (i2)))
1432038fd1498Szrj 		    PUT_REG_NOTE_KIND (note, REG_DEAD);
1432138fd1498Szrj 		  if (! (REG_P (XEXP (note, 0))
1432238fd1498Szrj 			 ? find_regno_note (i2, REG_NOTE_KIND (note),
1432338fd1498Szrj 					    REGNO (XEXP (note, 0)))
1432438fd1498Szrj 			 : find_reg_note (i2, REG_NOTE_KIND (note),
1432538fd1498Szrj 					  XEXP (note, 0))))
1432638fd1498Szrj 		    place = i2;
1432738fd1498Szrj 		}
1432838fd1498Szrj 	    }
1432938fd1498Szrj 
1433038fd1498Szrj 	  break;
1433138fd1498Szrj 
1433238fd1498Szrj 	case REG_EQUAL:
1433338fd1498Szrj 	case REG_EQUIV:
1433438fd1498Szrj 	case REG_NOALIAS:
1433538fd1498Szrj 	  /* These notes say something about results of an insn.  We can
1433638fd1498Szrj 	     only support them if they used to be on I3 in which case they
1433738fd1498Szrj 	     remain on I3.  Otherwise they are ignored.
1433838fd1498Szrj 
1433938fd1498Szrj 	     If the note refers to an expression that is not a constant, we
1434038fd1498Szrj 	     must also ignore the note since we cannot tell whether the
1434138fd1498Szrj 	     equivalence is still true.  It might be possible to do
1434238fd1498Szrj 	     slightly better than this (we only have a problem if I2DEST
1434338fd1498Szrj 	     or I1DEST is present in the expression), but it doesn't
1434438fd1498Szrj 	     seem worth the trouble.  */
1434538fd1498Szrj 
1434638fd1498Szrj 	  if (from_insn == i3
1434738fd1498Szrj 	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
1434838fd1498Szrj 	    place = i3;
1434938fd1498Szrj 	  break;
1435038fd1498Szrj 
1435138fd1498Szrj 	case REG_INC:
1435238fd1498Szrj 	  /* These notes say something about how a register is used.  They must
1435338fd1498Szrj 	     be present on any use of the register in I2 or I3.  */
1435438fd1498Szrj 	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
1435538fd1498Szrj 	    place = i3;
1435638fd1498Szrj 
1435738fd1498Szrj 	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
1435838fd1498Szrj 	    {
1435938fd1498Szrj 	      if (place)
1436038fd1498Szrj 		place2 = i2;
1436138fd1498Szrj 	      else
1436238fd1498Szrj 		place = i2;
1436338fd1498Szrj 	    }
1436438fd1498Szrj 	  break;
1436538fd1498Szrj 
1436638fd1498Szrj 	case REG_LABEL_TARGET:
1436738fd1498Szrj 	case REG_LABEL_OPERAND:
1436838fd1498Szrj 	  /* This can show up in several ways -- either directly in the
1436938fd1498Szrj 	     pattern, or hidden off in the constant pool with (or without?)
1437038fd1498Szrj 	     a REG_EQUAL note.  */
1437138fd1498Szrj 	  /* ??? Ignore the without-reg_equal-note problem for now.  */
1437238fd1498Szrj 	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
1437338fd1498Szrj 	      || ((tem_note = find_reg_note (i3, REG_EQUAL, NULL_RTX))
1437438fd1498Szrj 		  && GET_CODE (XEXP (tem_note, 0)) == LABEL_REF
1437538fd1498Szrj 		  && label_ref_label (XEXP (tem_note, 0)) == XEXP (note, 0)))
1437638fd1498Szrj 	    place = i3;
1437738fd1498Szrj 
1437838fd1498Szrj 	  if (i2
1437938fd1498Szrj 	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
1438038fd1498Szrj 		  || ((tem_note = find_reg_note (i2, REG_EQUAL, NULL_RTX))
1438138fd1498Szrj 		      && GET_CODE (XEXP (tem_note, 0)) == LABEL_REF
1438238fd1498Szrj 		      && label_ref_label (XEXP (tem_note, 0)) == XEXP (note, 0))))
1438338fd1498Szrj 	    {
1438438fd1498Szrj 	      if (place)
1438538fd1498Szrj 		place2 = i2;
1438638fd1498Szrj 	      else
1438738fd1498Szrj 		place = i2;
1438838fd1498Szrj 	    }
1438938fd1498Szrj 
1439038fd1498Szrj 	  /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
1439138fd1498Szrj 	     as a JUMP_LABEL or decrement LABEL_NUSES if it's already
1439238fd1498Szrj 	     there.  */
1439338fd1498Szrj 	  if (place && JUMP_P (place)
1439438fd1498Szrj 	      && REG_NOTE_KIND (note) == REG_LABEL_TARGET
1439538fd1498Szrj 	      && (JUMP_LABEL (place) == NULL
1439638fd1498Szrj 		  || JUMP_LABEL (place) == XEXP (note, 0)))
1439738fd1498Szrj 	    {
1439838fd1498Szrj 	      rtx label = JUMP_LABEL (place);
1439938fd1498Szrj 
1440038fd1498Szrj 	      if (!label)
1440138fd1498Szrj 		JUMP_LABEL (place) = XEXP (note, 0);
1440238fd1498Szrj 	      else if (LABEL_P (label))
1440338fd1498Szrj 		LABEL_NUSES (label)--;
1440438fd1498Szrj 	    }
1440538fd1498Szrj 
1440638fd1498Szrj 	  if (place2 && JUMP_P (place2)
1440738fd1498Szrj 	      && REG_NOTE_KIND (note) == REG_LABEL_TARGET
1440838fd1498Szrj 	      && (JUMP_LABEL (place2) == NULL
1440938fd1498Szrj 		  || JUMP_LABEL (place2) == XEXP (note, 0)))
1441038fd1498Szrj 	    {
1441138fd1498Szrj 	      rtx label = JUMP_LABEL (place2);
1441238fd1498Szrj 
1441338fd1498Szrj 	      if (!label)
1441438fd1498Szrj 		JUMP_LABEL (place2) = XEXP (note, 0);
1441538fd1498Szrj 	      else if (LABEL_P (label))
1441638fd1498Szrj 		LABEL_NUSES (label)--;
1441738fd1498Szrj 	      place2 = 0;
1441838fd1498Szrj 	    }
1441938fd1498Szrj 	  break;
1442038fd1498Szrj 
1442138fd1498Szrj 	case REG_NONNEG:
1442238fd1498Szrj 	  /* This note says something about the value of a register prior
1442338fd1498Szrj 	     to the execution of an insn.  It is too much trouble to see
1442438fd1498Szrj 	     if the note is still correct in all situations.  It is better
1442538fd1498Szrj 	     to simply delete it.  */
1442638fd1498Szrj 	  break;
1442738fd1498Szrj 
1442838fd1498Szrj 	case REG_DEAD:
1442938fd1498Szrj 	  /* If we replaced the right hand side of FROM_INSN with a
1443038fd1498Szrj 	     REG_EQUAL note, the original use of the dying register
1443138fd1498Szrj 	     will not have been combined into I3 and I2.  In such cases,
1443238fd1498Szrj 	     FROM_INSN is guaranteed to be the first of the combined
1443338fd1498Szrj 	     instructions, so we simply need to search back before
1443438fd1498Szrj 	     FROM_INSN for the previous use or set of this register,
1443538fd1498Szrj 	     then alter the notes there appropriately.
1443638fd1498Szrj 
1443738fd1498Szrj 	     If the register is used as an input in I3, it dies there.
1443838fd1498Szrj 	     Similarly for I2, if it is nonzero and adjacent to I3.
1443938fd1498Szrj 
1444038fd1498Szrj 	     If the register is not used as an input in either I3 or I2
1444138fd1498Szrj 	     and it is not one of the registers we were supposed to eliminate,
1444238fd1498Szrj 	     there are two possibilities.  We might have a non-adjacent I2
1444338fd1498Szrj 	     or we might have somehow eliminated an additional register
1444438fd1498Szrj 	     from a computation.  For example, we might have had A & B where
1444538fd1498Szrj 	     we discover that B will always be zero.  In this case we will
1444638fd1498Szrj 	     eliminate the reference to A.
1444738fd1498Szrj 
1444838fd1498Szrj 	     In both cases, we must search to see if we can find a previous
1444938fd1498Szrj 	     use of A and put the death note there.  */
1445038fd1498Szrj 
1445138fd1498Szrj 	  if (from_insn
1445238fd1498Szrj 	      && from_insn == i2mod
1445338fd1498Szrj 	      && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
1445438fd1498Szrj 	    tem_insn = from_insn;
1445538fd1498Szrj 	  else
1445638fd1498Szrj 	    {
1445738fd1498Szrj 	      if (from_insn
1445838fd1498Szrj 		  && CALL_P (from_insn)
1445938fd1498Szrj 		  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
1446038fd1498Szrj 		place = from_insn;
1446138fd1498Szrj 	      else if (i2 && reg_set_p (XEXP (note, 0), PATTERN (i2)))
1446238fd1498Szrj 		{
1446338fd1498Szrj 		  /* If the new I2 sets the same register that is marked
1446438fd1498Szrj 		     dead in the note, we do not in general know where to
1446538fd1498Szrj 		     put the note.  One important case we _can_ handle is
1446638fd1498Szrj 		     when the note comes from I3.  */
1446738fd1498Szrj 		  if (from_insn == i3)
1446838fd1498Szrj 		    place = i3;
1446938fd1498Szrj 		  else
1447038fd1498Szrj 		    break;
1447138fd1498Szrj 		}
1447238fd1498Szrj 	      else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
1447338fd1498Szrj 		place = i3;
1447438fd1498Szrj 	      else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
1447538fd1498Szrj 		       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
1447638fd1498Szrj 		place = i2;
1447738fd1498Szrj 	      else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
1447838fd1498Szrj 			&& !(i2mod
1447938fd1498Szrj 			     && reg_overlap_mentioned_p (XEXP (note, 0),
1448038fd1498Szrj 							 i2mod_old_rhs)))
1448138fd1498Szrj 		       || rtx_equal_p (XEXP (note, 0), elim_i1)
1448238fd1498Szrj 		       || rtx_equal_p (XEXP (note, 0), elim_i0))
1448338fd1498Szrj 		break;
1448438fd1498Szrj 	      tem_insn = i3;
1448538fd1498Szrj 	    }
1448638fd1498Szrj 
1448738fd1498Szrj 	  if (place == 0)
1448838fd1498Szrj 	    {
1448938fd1498Szrj 	      basic_block bb = this_basic_block;
1449038fd1498Szrj 
1449138fd1498Szrj 	      for (tem_insn = PREV_INSN (tem_insn); place == 0; tem_insn = PREV_INSN (tem_insn))
1449238fd1498Szrj 		{
1449338fd1498Szrj 		  if (!NONDEBUG_INSN_P (tem_insn))
1449438fd1498Szrj 		    {
1449538fd1498Szrj 		      if (tem_insn == BB_HEAD (bb))
1449638fd1498Szrj 			break;
1449738fd1498Szrj 		      continue;
1449838fd1498Szrj 		    }
1449938fd1498Szrj 
1450038fd1498Szrj 		  /* If the register is being set at TEM_INSN, see if that is all
1450138fd1498Szrj 		     TEM_INSN is doing.  If so, delete TEM_INSN.  Otherwise, make this
1450238fd1498Szrj 		     into a REG_UNUSED note instead. Don't delete sets to
1450338fd1498Szrj 		     global register vars.  */
1450438fd1498Szrj 		  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
1450538fd1498Szrj 		       || !global_regs[REGNO (XEXP (note, 0))])
1450638fd1498Szrj 		      && reg_set_p (XEXP (note, 0), PATTERN (tem_insn)))
1450738fd1498Szrj 		    {
1450838fd1498Szrj 		      rtx set = single_set (tem_insn);
1450938fd1498Szrj 		      rtx inner_dest = 0;
1451038fd1498Szrj 		      rtx_insn *cc0_setter = NULL;
1451138fd1498Szrj 
1451238fd1498Szrj 		      if (set != 0)
1451338fd1498Szrj 			for (inner_dest = SET_DEST (set);
1451438fd1498Szrj 			     (GET_CODE (inner_dest) == STRICT_LOW_PART
1451538fd1498Szrj 			      || GET_CODE (inner_dest) == SUBREG
1451638fd1498Szrj 			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
1451738fd1498Szrj 			     inner_dest = XEXP (inner_dest, 0))
1451838fd1498Szrj 			  ;
1451938fd1498Szrj 
1452038fd1498Szrj 		      /* Verify that it was the set, and not a clobber that
1452138fd1498Szrj 			 modified the register.
1452238fd1498Szrj 
1452338fd1498Szrj 			 CC0 targets must be careful to maintain setter/user
1452438fd1498Szrj 			 pairs.  If we cannot delete the setter due to side
1452538fd1498Szrj 			 effects, mark the user with an UNUSED note instead
1452638fd1498Szrj 			 of deleting it.  */
1452738fd1498Szrj 
1452838fd1498Szrj 		      if (set != 0 && ! side_effects_p (SET_SRC (set))
1452938fd1498Szrj 			  && rtx_equal_p (XEXP (note, 0), inner_dest)
1453038fd1498Szrj 			  && (!HAVE_cc0
1453138fd1498Szrj 			      || (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
1453238fd1498Szrj 				  || ((cc0_setter = prev_cc0_setter (tem_insn)) != NULL
1453338fd1498Szrj 				      && sets_cc0_p (PATTERN (cc0_setter)) > 0))))
1453438fd1498Szrj 			{
1453538fd1498Szrj 			  /* Move the notes and links of TEM_INSN elsewhere.
1453638fd1498Szrj 			     This might delete other dead insns recursively.
1453738fd1498Szrj 			     First set the pattern to something that won't use
1453838fd1498Szrj 			     any register.  */
1453938fd1498Szrj 			  rtx old_notes = REG_NOTES (tem_insn);
1454038fd1498Szrj 
1454138fd1498Szrj 			  PATTERN (tem_insn) = pc_rtx;
1454238fd1498Szrj 			  REG_NOTES (tem_insn) = NULL;
1454338fd1498Szrj 
1454438fd1498Szrj 			  distribute_notes (old_notes, tem_insn, tem_insn, NULL,
1454538fd1498Szrj 					    NULL_RTX, NULL_RTX, NULL_RTX);
1454638fd1498Szrj 			  distribute_links (LOG_LINKS (tem_insn));
1454738fd1498Szrj 
1454838fd1498Szrj 			  unsigned int regno = REGNO (XEXP (note, 0));
1454938fd1498Szrj 			  reg_stat_type *rsp = &reg_stat[regno];
1455038fd1498Szrj 			  if (rsp->last_set == tem_insn)
1455138fd1498Szrj 			    record_value_for_reg (XEXP (note, 0), NULL, NULL_RTX);
1455238fd1498Szrj 
1455338fd1498Szrj 			  SET_INSN_DELETED (tem_insn);
1455438fd1498Szrj 			  if (tem_insn == i2)
1455538fd1498Szrj 			    i2 = NULL;
1455638fd1498Szrj 
1455738fd1498Szrj 			  /* Delete the setter too.  */
1455838fd1498Szrj 			  if (cc0_setter)
1455938fd1498Szrj 			    {
1456038fd1498Szrj 			      PATTERN (cc0_setter) = pc_rtx;
1456138fd1498Szrj 			      old_notes = REG_NOTES (cc0_setter);
1456238fd1498Szrj 			      REG_NOTES (cc0_setter) = NULL;
1456338fd1498Szrj 
1456438fd1498Szrj 			      distribute_notes (old_notes, cc0_setter,
1456538fd1498Szrj 						cc0_setter, NULL,
1456638fd1498Szrj 						NULL_RTX, NULL_RTX, NULL_RTX);
1456738fd1498Szrj 			      distribute_links (LOG_LINKS (cc0_setter));
1456838fd1498Szrj 
1456938fd1498Szrj 			      SET_INSN_DELETED (cc0_setter);
1457038fd1498Szrj 			      if (cc0_setter == i2)
1457138fd1498Szrj 				i2 = NULL;
1457238fd1498Szrj 			    }
1457338fd1498Szrj 			}
1457438fd1498Szrj 		      else
1457538fd1498Szrj 			{
1457638fd1498Szrj 			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
1457738fd1498Szrj 
1457838fd1498Szrj 			  /*  If there isn't already a REG_UNUSED note, put one
1457938fd1498Szrj 			      here.  Do not place a REG_DEAD note, even if
1458038fd1498Szrj 			      the register is also used here; that would not
1458138fd1498Szrj 			      match the algorithm used in lifetime analysis
1458238fd1498Szrj 			      and can cause the consistency check in the
1458338fd1498Szrj 			      scheduler to fail.  */
1458438fd1498Szrj 			  if (! find_regno_note (tem_insn, REG_UNUSED,
1458538fd1498Szrj 						 REGNO (XEXP (note, 0))))
1458638fd1498Szrj 			    place = tem_insn;
1458738fd1498Szrj 			  break;
1458838fd1498Szrj 			}
1458938fd1498Szrj 		    }
1459038fd1498Szrj 		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem_insn))
1459138fd1498Szrj 			   || (CALL_P (tem_insn)
1459238fd1498Szrj 			       && find_reg_fusage (tem_insn, USE, XEXP (note, 0))))
1459338fd1498Szrj 		    {
1459438fd1498Szrj 		      place = tem_insn;
1459538fd1498Szrj 
1459638fd1498Szrj 		      /* If we are doing a 3->2 combination, and we have a
1459738fd1498Szrj 			 register which formerly died in i3 and was not used
1459838fd1498Szrj 			 by i2, which now no longer dies in i3 and is used in
1459938fd1498Szrj 			 i2 but does not die in i2, and place is between i2
1460038fd1498Szrj 			 and i3, then we may need to move a link from place to
1460138fd1498Szrj 			 i2.  */
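		      /* Hypothetical illustration: (reg 70) died in the
			 old I3 and was absent from the old I2; after the
			 3->2 combination the new I2 uses (reg 70), while
			 PLACE, an insn between I2 and I3, still carries
			 its LOG_LINK.  Redistributing PLACE's links lets
			 the link migrate back to I2.  */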
1460238fd1498Szrj 		      if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
1460338fd1498Szrj 			  && from_insn
1460438fd1498Szrj 			  && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
1460538fd1498Szrj 			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
1460638fd1498Szrj 			{
1460738fd1498Szrj 			  struct insn_link *links = LOG_LINKS (place);
1460838fd1498Szrj 			  LOG_LINKS (place) = NULL;
1460938fd1498Szrj 			  distribute_links (links);
1461038fd1498Szrj 			}
1461138fd1498Szrj 		      break;
1461238fd1498Szrj 		    }
1461338fd1498Szrj 
1461438fd1498Szrj 		  if (tem_insn == BB_HEAD (bb))
1461538fd1498Szrj 		    break;
1461638fd1498Szrj 		}
1461738fd1498Szrj 
1461838fd1498Szrj 	    }
1461938fd1498Szrj 
1462038fd1498Szrj 	  /* If the register is set or already dead at PLACE, we needn't do
1462138fd1498Szrj 	     anything with this note if it is still a REG_DEAD note.
1462238fd1498Szrj 	     We check here if it is set at all, not if it is totally replaced,
1462338fd1498Szrj 	     which is what `dead_or_set_p' checks, so also check for it being
1462438fd1498Szrj 	     set partially.  */
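	  /* E.g. (illustrative): a partial set such as
	     (set (strict_low_part (subreg:QI (reg 70) 0)) ...)
	     does not totally replace (reg 70), which is why
	     reg_bitfield_target_p is tested below in addition to
	     dead_or_set_p.  */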
1462538fd1498Szrj 
1462638fd1498Szrj 	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
1462738fd1498Szrj 	    {
1462838fd1498Szrj 	      unsigned int regno = REGNO (XEXP (note, 0));
1462938fd1498Szrj 	      reg_stat_type *rsp = &reg_stat[regno];
1463038fd1498Szrj 
1463138fd1498Szrj 	      if (dead_or_set_p (place, XEXP (note, 0))
1463238fd1498Szrj 		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
1463338fd1498Szrj 		{
1463438fd1498Szrj 		  /* Unless the register previously died in PLACE, clear
1463538fd1498Szrj 		     last_death.  [I no longer understand why this is
1463638fd1498Szrj 		     being done.] */
1463738fd1498Szrj 		  if (rsp->last_death != place)
1463838fd1498Szrj 		    rsp->last_death = 0;
1463938fd1498Szrj 		  place = 0;
1464038fd1498Szrj 		}
1464138fd1498Szrj 	      else
1464238fd1498Szrj 		rsp->last_death = place;
1464338fd1498Szrj 
1464438fd1498Szrj 	      /* If this is a death note for a hard reg that is occupying
1464538fd1498Szrj 		 multiple registers, ensure that we are still using all
1464638fd1498Szrj 		 parts of the object.  If we find a piece of the object
1464738fd1498Szrj 		 that is unused, we must arrange for an appropriate REG_DEAD
1464838fd1498Szrj 		 note to be added for it.  However, we can't just emit a USE
1464938fd1498Szrj 		 and tag the note to it, since the register might actually
1465038fd1498Szrj 		 be dead; so we recurse, and the recursive call then finds
1465138fd1498Szrj 		 the previous insn that used this register.  */
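	      /* Hypothetical example: a REG_DEAD note for a DImode value
		 occupying hard regs 4 and 5, where PLACE references reg 4
		 only; the loop below then distributes a separate REG_DEAD
		 note for reg 5 (the recursive call finds its previous
		 user) or tags its setter with REG_UNUSED.  */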
1465238fd1498Szrj 
1465338fd1498Szrj 	      if (place && REG_NREGS (XEXP (note, 0)) > 1)
1465438fd1498Szrj 		{
1465538fd1498Szrj 		  unsigned int endregno = END_REGNO (XEXP (note, 0));
1465638fd1498Szrj 		  bool all_used = true;
1465738fd1498Szrj 		  unsigned int i;
1465838fd1498Szrj 
1465938fd1498Szrj 		  for (i = regno; i < endregno; i++)
1466038fd1498Szrj 		    if ((! refers_to_regno_p (i, PATTERN (place))
1466138fd1498Szrj 			 && ! find_regno_fusage (place, USE, i))
1466238fd1498Szrj 			|| dead_or_set_regno_p (place, i))
1466338fd1498Szrj 		      {
1466438fd1498Szrj 			all_used = false;
1466538fd1498Szrj 			break;
1466638fd1498Szrj 		      }
1466738fd1498Szrj 
1466838fd1498Szrj 		  if (! all_used)
1466938fd1498Szrj 		    {
1467038fd1498Szrj 		      /* Put only REG_DEAD notes for pieces that are
1467138fd1498Szrj 			 not already dead or set.  */
1467238fd1498Szrj 
1467338fd1498Szrj 		      for (i = regno; i < endregno;
1467438fd1498Szrj 			   i += hard_regno_nregs (i, reg_raw_mode[i]))
1467538fd1498Szrj 			{
1467638fd1498Szrj 			  rtx piece = regno_reg_rtx[i];
1467738fd1498Szrj 			  basic_block bb = this_basic_block;
1467838fd1498Szrj 
1467938fd1498Szrj 			  if (! dead_or_set_p (place, piece)
1468038fd1498Szrj 			      && ! reg_bitfield_target_p (piece,
1468138fd1498Szrj 							  PATTERN (place)))
1468238fd1498Szrj 			    {
1468338fd1498Szrj 			      rtx new_note = alloc_reg_note (REG_DEAD, piece,
1468438fd1498Szrj 							     NULL_RTX);
1468538fd1498Szrj 
1468638fd1498Szrj 			      distribute_notes (new_note, place, place,
1468738fd1498Szrj 						NULL, NULL_RTX, NULL_RTX,
1468838fd1498Szrj 						NULL_RTX);
1468938fd1498Szrj 			    }
1469038fd1498Szrj 			  else if (! refers_to_regno_p (i, PATTERN (place))
1469138fd1498Szrj 				   && ! find_regno_fusage (place, USE, i))
1469238fd1498Szrj 			    for (tem_insn = PREV_INSN (place); ;
1469338fd1498Szrj 				 tem_insn = PREV_INSN (tem_insn))
1469438fd1498Szrj 			      {
1469538fd1498Szrj 				if (!NONDEBUG_INSN_P (tem_insn))
1469638fd1498Szrj 				  {
1469738fd1498Szrj 				    if (tem_insn == BB_HEAD (bb))
1469838fd1498Szrj 				      break;
1469938fd1498Szrj 				    continue;
1470038fd1498Szrj 				  }
1470138fd1498Szrj 				if (dead_or_set_p (tem_insn, piece)
1470238fd1498Szrj 				    || reg_bitfield_target_p (piece,
1470338fd1498Szrj 							      PATTERN (tem_insn)))
1470438fd1498Szrj 				  {
1470538fd1498Szrj 				    add_reg_note (tem_insn, REG_UNUSED, piece);
1470638fd1498Szrj 				    break;
1470738fd1498Szrj 				  }
1470838fd1498Szrj 			      }
1470938fd1498Szrj 			}
1471038fd1498Szrj 
1471138fd1498Szrj 		      place = 0;
1471238fd1498Szrj 		    }
1471338fd1498Szrj 		}
1471438fd1498Szrj 	    }
1471538fd1498Szrj 	  break;
1471638fd1498Szrj 
1471738fd1498Szrj 	default:
1471838fd1498Szrj 	  /* Any other notes should not be present at this point in the
1471938fd1498Szrj 	     compilation.  */
1472038fd1498Szrj 	  gcc_unreachable ();
1472138fd1498Szrj 	}
1472238fd1498Szrj 
1472338fd1498Szrj       if (place)
1472438fd1498Szrj 	{
1472538fd1498Szrj 	  XEXP (note, 1) = REG_NOTES (place);
1472638fd1498Szrj 	  REG_NOTES (place) = note;
1472738fd1498Szrj 
1472838fd1498Szrj 	  /* Set added_notes_insn to the earliest insn we added a note to.  */
1472938fd1498Szrj 	  if (added_notes_insn == 0
1473038fd1498Szrj 	      || DF_INSN_LUID (added_notes_insn) > DF_INSN_LUID (place))
1473138fd1498Szrj 	    added_notes_insn = place;
1473238fd1498Szrj 	}
1473338fd1498Szrj 
1473438fd1498Szrj       if (place2)
1473538fd1498Szrj 	{
1473638fd1498Szrj 	  add_shallow_copy_of_reg_note (place2, note);
1473738fd1498Szrj 
1473838fd1498Szrj 	  /* Set added_notes_insn to the earliest insn we added a note to.  */
1473938fd1498Szrj 	  if (added_notes_insn == 0
1474038fd1498Szrj 	      || DF_INSN_LUID (added_notes_insn) > DF_INSN_LUID (place2))
1474138fd1498Szrj 	    added_notes_insn = place2;
1474238fd1498Szrj 	}
1474338fd1498Szrj     }
1474438fd1498Szrj }
1474538fd1498Szrj 
1474638fd1498Szrj /* Similarly to above, distribute the LOG_LINKS that used to be present on
1474738fd1498Szrj    I3, I2, and I1 to new locations.  This is also called to add a link
1474838fd1498Szrj    pointing at I3 when I3's destination is changed.  */
1474938fd1498Szrj 
1475038fd1498Szrj static void
1475138fd1498Szrj distribute_links (struct insn_link *links)
1475238fd1498Szrj {
1475338fd1498Szrj   struct insn_link *link, *next_link;
1475438fd1498Szrj 
1475538fd1498Szrj   for (link = links; link; link = next_link)
1475638fd1498Szrj     {
1475738fd1498Szrj       rtx_insn *place = 0;
1475838fd1498Szrj       rtx_insn *insn;
1475938fd1498Szrj       rtx set, reg;
1476038fd1498Szrj 
1476138fd1498Szrj       next_link = link->next;
1476238fd1498Szrj 
1476338fd1498Szrj       /* If the insn that this link points to is a NOTE, ignore it.  */
1476438fd1498Szrj       if (NOTE_P (link->insn))
1476538fd1498Szrj 	continue;
1476638fd1498Szrj 
1476738fd1498Szrj       set = 0;
1476838fd1498Szrj       rtx pat = PATTERN (link->insn);
1476938fd1498Szrj       if (GET_CODE (pat) == SET)
1477038fd1498Szrj 	set = pat;
1477138fd1498Szrj       else if (GET_CODE (pat) == PARALLEL)
1477238fd1498Szrj 	{
1477338fd1498Szrj 	  int i;
1477438fd1498Szrj 	  for (i = 0; i < XVECLEN (pat, 0); i++)
1477538fd1498Szrj 	    {
1477638fd1498Szrj 	      set = XVECEXP (pat, 0, i);
1477738fd1498Szrj 	      if (GET_CODE (set) != SET)
1477838fd1498Szrj 		continue;
1477938fd1498Szrj 
1478038fd1498Szrj 	      reg = SET_DEST (set);
1478138fd1498Szrj 	      while (GET_CODE (reg) == ZERO_EXTRACT
1478238fd1498Szrj 		     || GET_CODE (reg) == STRICT_LOW_PART
1478338fd1498Szrj 		     || GET_CODE (reg) == SUBREG)
1478438fd1498Szrj 		reg = XEXP (reg, 0);
1478538fd1498Szrj 
1478638fd1498Szrj 	      if (!REG_P (reg))
1478738fd1498Szrj 		continue;
1478838fd1498Szrj 
1478938fd1498Szrj 	      if (REGNO (reg) == link->regno)
1479038fd1498Szrj 		break;
1479138fd1498Szrj 	    }
1479238fd1498Szrj 	  if (i == XVECLEN (pat, 0))
1479338fd1498Szrj 	    continue;
1479438fd1498Szrj 	}
1479538fd1498Szrj       else
1479638fd1498Szrj 	continue;
1479738fd1498Szrj 
1479838fd1498Szrj       reg = SET_DEST (set);
1479938fd1498Szrj 
1480038fd1498Szrj       while (GET_CODE (reg) == ZERO_EXTRACT
1480138fd1498Szrj 	     || GET_CODE (reg) == STRICT_LOW_PART
1480238fd1498Szrj 	     || GET_CODE (reg) == SUBREG)
1480338fd1498Szrj 	reg = XEXP (reg, 0);
1480438fd1498Szrj 
1480538fd1498Szrj       if (reg == pc_rtx)
1480638fd1498Szrj 	continue;
1480738fd1498Szrj 
1480838fd1498Szrj       /* A LOG_LINK is defined as being placed on the first insn that uses
1480938fd1498Szrj 	 a register and points to the insn that sets the register.  Start
1481038fd1498Szrj 	 searching at the next insn after the target of the link and stop
1481138fd1498Szrj 	 when we reach a set of the register or the end of the basic block.
1481238fd1498Szrj 
1481338fd1498Szrj 	 Note that this correctly handles the link that used to point from
1481438fd1498Szrj 	 I3 to I2.  Also note that not much searching is typically done here
1481538fd1498Szrj 	 since most links don't point very far away.  */
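      /* Illustrative example (insn numbers hypothetical):

	     I5: (set (reg 80) ...)           <- link->insn
	     I6: (set (reg 81) (const_int 0)) ;; no mention of (reg 80)
	     I7: (set (reg 82) (reg 80))      <- first use; link lands here

	 The loop below passes over I6 and places the link on I7.  */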
1481638fd1498Szrj 
1481738fd1498Szrj       for (insn = NEXT_INSN (link->insn);
1481838fd1498Szrj 	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1481938fd1498Szrj 		     || BB_HEAD (this_basic_block->next_bb) != insn));
1482038fd1498Szrj 	   insn = NEXT_INSN (insn))
1482138fd1498Szrj 	if (DEBUG_INSN_P (insn))
1482238fd1498Szrj 	  continue;
1482338fd1498Szrj 	else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
1482438fd1498Szrj 	  {
1482538fd1498Szrj 	    if (reg_referenced_p (reg, PATTERN (insn)))
1482638fd1498Szrj 	      place = insn;
1482738fd1498Szrj 	    break;
1482838fd1498Szrj 	  }
1482938fd1498Szrj 	else if (CALL_P (insn)
1483038fd1498Szrj 		 && find_reg_fusage (insn, USE, reg))
1483138fd1498Szrj 	  {
1483238fd1498Szrj 	    place = insn;
1483338fd1498Szrj 	    break;
1483438fd1498Szrj 	  }
1483538fd1498Szrj 	else if (INSN_P (insn) && reg_set_p (reg, insn))
1483638fd1498Szrj 	  break;
1483738fd1498Szrj 
1483838fd1498Szrj       /* If we found a place to put the link, place it there unless there
1483938fd1498Szrj 	 is already a link to the same insn as LINK at that point.  */
1484038fd1498Szrj 
1484138fd1498Szrj       if (place)
1484238fd1498Szrj 	{
1484338fd1498Szrj 	  struct insn_link *link2;
1484438fd1498Szrj 
1484538fd1498Szrj 	  FOR_EACH_LOG_LINK (link2, place)
1484638fd1498Szrj 	    if (link2->insn == link->insn && link2->regno == link->regno)
1484738fd1498Szrj 	      break;
1484838fd1498Szrj 
1484938fd1498Szrj 	  if (link2 == NULL)
1485038fd1498Szrj 	    {
1485138fd1498Szrj 	      link->next = LOG_LINKS (place);
1485238fd1498Szrj 	      LOG_LINKS (place) = link;
1485338fd1498Szrj 
1485438fd1498Szrj 	      /* Set added_links_insn to the earliest insn we added a
1485538fd1498Szrj 		 link to.  */
1485638fd1498Szrj 	      if (added_links_insn == 0
1485738fd1498Szrj 		  || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
1485838fd1498Szrj 		added_links_insn = place;
1485938fd1498Szrj 	    }
1486038fd1498Szrj 	}
1486138fd1498Szrj     }
1486238fd1498Szrj }
1486338fd1498Szrj 
1486438fd1498Szrj /* Check for any register or memory mentioned in EQUIV that is not
1486538fd1498Szrj    mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
1486638fd1498Szrj    of EXPR where some registers may have been replaced by constants.  */
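/* For instance (illustrative register numbers): with
   EQUIV = (plus (reg 66) (reg 67)) and EXPR = (plus (reg 66) (const_int 8)),
   (reg 67) is not mentioned in EXPR, so this returns true; only REGs and
   MEMs are tested, so constants in EQUIV never trigger a match.  */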
1486738fd1498Szrj 
1486838fd1498Szrj static bool
1486938fd1498Szrj unmentioned_reg_p (rtx equiv, rtx expr)
1487038fd1498Szrj {
1487138fd1498Szrj   subrtx_iterator::array_type array;
1487238fd1498Szrj   FOR_EACH_SUBRTX (iter, array, equiv, NONCONST)
1487338fd1498Szrj     {
1487438fd1498Szrj       const_rtx x = *iter;
1487538fd1498Szrj       if ((REG_P (x) || MEM_P (x))
1487638fd1498Szrj 	  && !reg_mentioned_p (x, expr))
1487738fd1498Szrj 	return true;
1487838fd1498Szrj     }
1487938fd1498Szrj   return false;
1488038fd1498Szrj }
1488138fd1498Szrj 
1488238fd1498Szrj DEBUG_FUNCTION void
1488338fd1498Szrj dump_combine_stats (FILE *file)
1488438fd1498Szrj {
1488538fd1498Szrj   fprintf
1488638fd1498Szrj     (file,
1488738fd1498Szrj      ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
1488838fd1498Szrj      combine_attempts, combine_merges, combine_extras, combine_successes);
1488938fd1498Szrj }
1489038fd1498Szrj 
1489138fd1498Szrj void
1489238fd1498Szrj dump_combine_total_stats (FILE *file)
1489338fd1498Szrj {
1489438fd1498Szrj   fprintf
1489538fd1498Szrj     (file,
1489638fd1498Szrj      "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
1489738fd1498Szrj      total_attempts, total_merges, total_extras, total_successes);
1489838fd1498Szrj }
1489938fd1498Szrj 
1490038fd1498Szrj /* Try combining insns through substitution.  */
1490138fd1498Szrj static unsigned int
1490238fd1498Szrj rest_of_handle_combine (void)
1490338fd1498Szrj {
1490438fd1498Szrj   int rebuild_jump_labels_after_combine;
1490538fd1498Szrj 
1490638fd1498Szrj   df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
1490738fd1498Szrj   df_note_add_problem ();
1490838fd1498Szrj   df_analyze ();
1490938fd1498Szrj 
1491038fd1498Szrj   regstat_init_n_sets_and_refs ();
1491138fd1498Szrj   reg_n_sets_max = max_reg_num ();
1491238fd1498Szrj 
1491338fd1498Szrj   rebuild_jump_labels_after_combine
1491438fd1498Szrj     = combine_instructions (get_insns (), max_reg_num ());
1491538fd1498Szrj 
1491638fd1498Szrj   /* Combining insns may have turned an indirect jump into a
1491738fd1498Szrj      direct jump.  Rebuild the JUMP_LABEL fields of jumping
1491838fd1498Szrj      instructions.  */
1491938fd1498Szrj   if (rebuild_jump_labels_after_combine)
1492038fd1498Szrj     {
1492138fd1498Szrj       if (dom_info_available_p (CDI_DOMINATORS))
1492238fd1498Szrj 	free_dominance_info (CDI_DOMINATORS);
1492338fd1498Szrj       timevar_push (TV_JUMP);
1492438fd1498Szrj       rebuild_jump_labels (get_insns ());
1492538fd1498Szrj       cleanup_cfg (0);
1492638fd1498Szrj       timevar_pop (TV_JUMP);
1492738fd1498Szrj     }
1492838fd1498Szrj 
1492938fd1498Szrj   regstat_free_n_sets_and_refs ();
1493038fd1498Szrj   return 0;
1493138fd1498Szrj }
1493238fd1498Szrj 
1493338fd1498Szrj namespace {
1493438fd1498Szrj 
1493538fd1498Szrj const pass_data pass_data_combine =
1493638fd1498Szrj {
1493738fd1498Szrj   RTL_PASS, /* type */
1493838fd1498Szrj   "combine", /* name */
1493938fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
1494038fd1498Szrj   TV_COMBINE, /* tv_id */
1494138fd1498Szrj   PROP_cfglayout, /* properties_required */
1494238fd1498Szrj   0, /* properties_provided */
1494338fd1498Szrj   0, /* properties_destroyed */
1494438fd1498Szrj   0, /* todo_flags_start */
1494538fd1498Szrj   TODO_df_finish, /* todo_flags_finish */
1494638fd1498Szrj };
1494738fd1498Szrj 
1494838fd1498Szrj class pass_combine : public rtl_opt_pass
1494938fd1498Szrj {
1495038fd1498Szrj public:
1495138fd1498Szrj   pass_combine (gcc::context *ctxt)
1495238fd1498Szrj     : rtl_opt_pass (pass_data_combine, ctxt)
1495338fd1498Szrj   {}
1495438fd1498Szrj 
1495538fd1498Szrj   /* opt_pass methods: */
1495638fd1498Szrj   virtual bool gate (function *) { return (optimize > 0); }
1495738fd1498Szrj   virtual unsigned int execute (function *)
1495838fd1498Szrj     {
1495938fd1498Szrj       return rest_of_handle_combine ();
1496038fd1498Szrj     }
1496138fd1498Szrj 
1496238fd1498Szrj }; // class pass_combine
1496338fd1498Szrj 
1496438fd1498Szrj } // anon namespace
1496538fd1498Szrj 
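/* A note on registration (hedged): in GCC's pass pipeline this factory
   is referenced from passes.def via NEXT_PASS (pass_combine); the gate
   and execute hooks above then run the combiner once per function when
   optimizing.  */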
1496638fd1498Szrj rtl_opt_pass *
1496738fd1498Szrj make_pass_combine (gcc::context *ctxt)
1496838fd1498Szrj {
1496938fd1498Szrj   return new pass_combine (ctxt);
1497038fd1498Szrj }